From f5e59484a62c45a94730fb57c6913abe4d1256dd Mon Sep 17 00:00:00 2001
From: asuessenbach
Date: Sun, 12 Apr 2020 21:49:12 +0200
Subject: [PATCH] Introduce usage of clang-format to format vulkan.hpp and the other sources.

---
 .clang-format | 89 +
 CMakeLists.txt | 8 +
 README.md | 6 +
 VulkanHppGenerator.cpp | 5452 +-
 VulkanHppGenerator.hpp | 836 +-
 samples/01_InitInstance/01_InitInstance.cpp | 21 +-
 .../02_EnumerateDevices.cpp | 25 +-
 samples/03_InitDevice/03_InitDevice.cpp | 41 +-
 .../04_InitCommandBuffer.cpp | 39 +-
 samples/05_InitSwapchain/05_InitSwapchain.cpp | 159 +-
 .../06_InitDepthBuffer/06_InitDepthBuffer.cpp | 88 +-
 .../07_InitUniformBuffer.cpp | 79 +-
 .../08_InitPipelineLayout.cpp | 40 +-
 .../09_InitDescriptorSet.cpp | 56 +-
 .../10_InitRenderPass/10_InitRenderPass.cpp | 77 +-
 samples/11_InitShaders/11_InitShaders.cpp | 48 +-
 .../12_InitFrameBuffers.cpp | 64 +-
 .../13_InitVertexBuffer.cpp | 117 +-
 samples/14_InitPipeline/14_InitPipeline.cpp | 182 +-
 samples/15_DrawCube/15_DrawCube.cpp | 179 +-
 samples/16_Vulkan_1_1/16_Vulkan_1_1.cpp | 82 +-
 samples/CopyBlitImage/CopyBlitImage.cpp | 226 +-
 .../CreateDebugUtilsMessenger.cpp | 140 +-
 samples/DrawTexturedCube/DrawTexturedCube.cpp | 186 +-
 samples/DynamicUniform/DynamicUniform.cpp | 222 +-
 .../EnableValidationWithCallback.cpp | 180 +-
 .../EnumerateDevicesAdvanced.cpp | 41 +-
 samples/Events/Events.cpp | 122 +-
 samples/ImmutableSampler/ImmutableSampler.cpp | 202 +-
 samples/InitTexture/InitTexture.cpp | 190 +-
 samples/InputAttachment/InputAttachment.cpp | 257 +-
 .../InstanceExtensionProperties.cpp | 30 +-
 .../InstanceLayerExtensionProperties.cpp | 48 +-
 .../InstanceLayerProperties.cpp | 27 +-
 samples/InstanceVersion/InstanceVersion.cpp | 26 +-
 samples/MultipleSets/MultipleSets.cpp | 212 +-
 samples/OcclusionQuery/OcclusionQuery.cpp | 229 +-
 .../PhysicalDeviceExtensions.cpp | 38 +-
 .../PhysicalDeviceFeatures.cpp | 817 +-
 .../PhysicalDeviceGroups.cpp | 64 +-
 .../PhysicalDeviceMemoryProperties.cpp | 69 +-
 .../PhysicalDeviceProperties.cpp | 1506 +-
 .../PhysicalDeviceQueueFamilyProperties.cpp | 70 +-
 samples/PipelineCache/PipelineCache.cpp | 296 +-
 .../PipelineDerivative/PipelineDerivative.cpp | 288 +-
 samples/PushConstants/PushConstants.cpp | 193 +-
 samples/PushDescriptors/PushDescriptors.cpp | 209 +-
 samples/RayTracing/CameraManipulator.cpp | 239 +-
 samples/RayTracing/RayTracing.cpp | 1114 +-
 .../SecondaryCommandBuffer.cpp | 248 +-
 .../SeparateImageSampler.cpp | 244 +-
 .../SurfaceCapabilities.cpp | 145 +-
 samples/SurfaceFormats/SurfaceFormats.cpp | 40 +-
 samples/Template/Template.cpp | 186 +-
 samples/TexelBuffer/TexelBuffer.cpp | 180 +-
 samples/utils/math.cpp | 36 +-
 samples/utils/math.hpp | 7 +-
 samples/utils/shaders.cpp | 89 +-
 samples/utils/shaders.hpp | 16 +-
 samples/utils/utils.cpp | 1217 +-
 samples/utils/utils.hpp | 404 +-
 tests/DeviceFunctions/DeviceFunctions.cpp | 51 +-
 .../DispatchLoaderDynamic.cpp | 29 +-
 tests/Hash/Hash.cpp | 27 +-
 tests/NoExceptions/NoExceptions.cpp | 56 +-
 tests/StructureChain/StructureChain.cpp | 106 +-
 .../UniqueHandleDefaultArguments.cpp | 6 +-
 vulkan/vulkan.hpp | 73634 ++++++++++------
 68 files changed, 56064 insertions(+), 35586 deletions(-)
 create mode 100644 .clang-format

diff --git a/.clang-format b/.clang-format
new file mode 100644
index 0000000..b32b2b4
--- /dev/null
+++ b/.clang-format
@@ -0,0 +1,89 @@
+---
+AccessModifierOffset : -2
+AlignAfterOpenBracket : Align
+AlignConsecutiveAssignments : true
+AlignConsecutiveDeclarations : true
+AlignConsecutiveMacros : true
+AlignEscapedNewlines : Left
+AlignOperands : true
+AlignTrailingComments : true
+AllowAllArgumentsOnNextLine : true
+AllowAllConstructorInitializersOnNextLine : true
+AllowAllParametersOfDeclarationOnNextLine : true
+AllowShortBlocksOnASingleLine : Empty
+AllowShortCaseLabelsOnASingleLine : true
+AllowShortFunctionsOnASingleLine : Empty
+AllowShortIfStatementsOnASingleLine : Never
+AllowShortLambdasOnASingleLine : Inline
+AllowShortLoopsOnASingleLine : false
+AlwaysBreakAfterReturnType : None
+AlwaysBreakBeforeMultilineStrings : true
+AlwaysBreakTemplateDeclarations : Yes
+BinPackArguments : false
+BinPackParameters : false
+BraceWrapping :
+  AfterCaseLabel : true
+  AfterClass : true
+  AfterControlStatement : Always
+  AfterEnum : true
+  AfterFunction : true
+  AfterNamespace : true
+  AfterStruct : true
+  AfterUnion : true
+  AfterExternBlock : true
+  BeforeCatch : true
+  BeforeElse : true
+# BeforeLambdaBody : true
+  IndentBraces : false
+  SplitEmptyFunction : false
+  SplitEmptyRecord : false
+  SplitEmptyNamespace : false
+BreakBeforeBinaryOperators : None
+BreakBeforeBraces : Custom
+BreakBeforeTernaryOperators : true
+BreakConstructorInitializers : BeforeColon
+BreakInheritanceList : BeforeComma
+BreakStringLiterals : false
+ColumnLimit : 120
+CompactNamespaces : false
+ConstructorInitializerAllOnOneLineOrOnePerLine : true
+ConstructorInitializerIndentWidth : 2
+ContinuationIndentWidth : 2
+Cpp11BracedListStyle : false
+FixNamespaceComments : true
+IncludeBlocks : Regroup
+# IndentCaseBlocks : true
+IndentCaseLabels : true
+IndentPPDirectives : AfterHash
+IndentWidth : 2
+IndentWrappedFunctionNames : true
+KeepEmptyLinesAtTheStartOfBlocks : false
+MaxEmptyLinesToKeep : 1
+NamespaceIndentation : All
+PointerAlignment : Middle
+ReflowComments : true
+SortIncludes : true
+SortUsingDeclarations : true
+SpaceAfterCStyleCast : false
+SpaceAfterLogicalNot : false
+SpaceAfterTemplateKeyword : true
+SpaceBeforeAssignmentOperators : true
+SpaceBeforeCpp11BracedList : false
+SpaceBeforeCtorInitializerColon : true
+SpaceBeforeInheritanceColon : true
+SpaceBeforeParens : ControlStatements
+SpaceBeforeRangeBasedForLoopColon : true
+SpaceBeforeSquareBrackets : false
+SpaceInEmptyBlock : false
+SpaceInEmptyParentheses : false
+SpacesBeforeTrailingComments : 2
+SpacesInAngles : false
+SpacesInCStyleCastParentheses : false
+# SpacesInConditionalStatement : true
+SpacesInContainerLiterals : true
+SpacesInParentheses : true
+SpacesInSquareBrackets : false
+Standard : Latest
+UseCRLF : true
+UseTab : Never
+...
diff --git a/CMakeLists.txt b/CMakeLists.txt
index fb519bf..0fa5b6c 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -30,6 +30,14 @@ set_property(GLOBAL PROPERTY USE_FOLDERS ON)
 
 project(VulkanHppGenerator)
 
+find_program(CLANG_FORMAT_EXECUTABLE NAMES clang-format)
+
+if(CLANG_FORMAT_EXECUTABLE)
+  add_definitions(-DCLANG_FORMAT_EXECUTABLE="${CLANG_FORMAT_EXECUTABLE}")
+else()
+  message(WARNING " Could not find clang-format. Generated vulkan.hpp will not be nicely formatted.")
+endif()
+
 if(MSVC)
   add_compile_options(/W4)
 else(MSVC)
diff --git a/README.md b/README.md
index 3407fce..645cea2 100644
--- a/README.md
+++ b/README.md
@@ -17,6 +17,12 @@ Vulkan-Hpp requires a C++11 capable compiler to compile. The following compilers
 * GCC >= 4.8.2 (earlier version might work, but are untested)
 * Clang >= 3.3
 
+### Optional Features
+
+#### Formatting
+
+If the program clang-format is found by CMake, the define CLANG_FORMAT_EXECUTABLE is set accordingly.
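As an illustration only (this sketch is not part of the patch; the helper name, output path handling, and use of `std::system` are assumptions), a generator could consume that define roughly like this to run clang-format on the file it has just written:

```cpp
// Hypothetical sketch of how a generator might use the CLANG_FORMAT_EXECUTABLE
// define provided by CMake; not taken from this patch.
#include <cstdlib>
#include <iostream>
#include <string>

void formatGeneratedFile( std::string const & path )
{
#if defined( CLANG_FORMAT_EXECUTABLE )
  // -i formats the file in place; --style=file picks up the .clang-format file
  // found by searching upwards from the formatted file's directory.
  std::string command = std::string( CLANG_FORMAT_EXECUTABLE ) + " -i --style=file " + path;
  if ( std::system( command.c_str() ) != 0 )
  {
    std::cerr << "clang-format failed on " << path << ", keeping the unformatted output" << std::endl;
  }
#else
  // Without clang-format, the file keeps the generator's hard-coded formatting.
  (void)path;
#endif
}
```

Note that the quotes in `add_definitions(-DCLANG_FORMAT_EXECUTABLE="${CLANG_FORMAT_EXECUTABLE}")` make the macro expand to a string literal containing the full path to the tool, which is why it can be concatenated directly into a command string like the one above.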
In that case, the generated vulkan.hpp is formatted using the .clang-format file located in the root directory of this project. Otherwise it's formatted as hard-coded in the generator. + ## Usage ### namespace vk diff --git a/VulkanHppGenerator.cpp b/VulkanHppGenerator.cpp index 471a874..cc025be 100644 --- a/VulkanHppGenerator.cpp +++ b/VulkanHppGenerator.cpp @@ -12,72 +12,106 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include +#include "VulkanHppGenerator.hpp" + #include +#include +#include #include #include #include -#include #include -#include -#include "VulkanHppGenerator.hpp" -void appendArgumentCount(std::string & str, size_t vectorIndex, std::string const& vectorName, std::string const& counterName, size_t returnParamIndex, size_t templateParamIndex, bool twoStep, bool singular); -std::string appendFunctionBodyEnhancedLocalReturnVariableSingular(std::string & str, std::string const& indentation, std::string const& returnName, std::string const& typeName, bool isStructureChain); -void appendReinterpretCast(std::string & str, bool leadingConst, std::string const& type, bool trailingPointerToConst); -void appendTypesafeStuff(std::string &str, std::string const& typesafeCheck); -void appendVersionCheck(std::string & str, std::string const& version); -bool beginsWith(std::string const& text, std::string const& prefix); -bool endsWith(std::string const& text, std::string const& postfix); -void check(bool condition, int line, std::string const& message); -void checkAttributes(int line, std::map const& attributes, std::map> const& required, std::map> const& optional); -void checkElements(int line, std::vector const& elements, std::map const& required, std::set const& optional = {}); -void cleanup(std::stringstream & ss); -std::string constructArraySize(std::vector const& sizes); -std::string constructStandardArray(std::string const& type, std::vector const& sizes); -std::string createEnumValueName(std::string const& name, std::string const& prefix, std::string const& postfix, bool bitmask, std::string const& tag); -std::string createSuccessCode(std::string const& code, std::set const& tags); -std::string determineCommandName(std::string const& vulkanCommandName, std::string const& firstArgumentType); -std::set determineSkippedParams(size_t returnParamIndex, std::map const& vectorParamIndices); -bool determineStructureChaining(std::string const& structType, std::set const& extendedStructs, std::map const& structureAliases); -std::string extractTag(int line, std::string const& name, std::set const& tags); -std::string findTag(std::set const& tags, std::string const& name, std::string const& postfix = ""); -std::pair generateFunctionBodyStandardReturn(std::string const& returnType); -std::map getAttributes(tinyxml2::XMLElement const* element); -template std::vector getChildElements(ElementContainer const* element); -std::string getEnumPostfix(std::string const& name, std::set const& tags, std::string & prefix); -std::string getEnumPrefix(int line, std::string const& name, bool bitmask); -std::string readTypePostfix(tinyxml2::XMLNode const* node); -std::string readTypePrefix(tinyxml2::XMLNode const* node); -std::string replaceWithMap(std::string const &input, std::map replacements); -std::string startLowerCase(std::string const& input); -std::string startUpperCase(std::string const& input); -std::string stripPostfix(std::string const& value, std::string const& postfix); -std::string stripPluralS(std::string const& name); 
-std::string stripPrefix(std::string const& value, std::string const& prefix); -std::string toCamelCase(std::string const& value); -std::string toUpperCase(std::string const& name); -std::vector tokenize(std::string const& tokenString, std::string const& separator); -std::string trim(std::string const& input); -std::string trimEnd(std::string const& input); -void warn(bool condition, int line, std::string const& message); +void appendArgumentCount( std::string & str, + size_t vectorIndex, + std::string const & vectorName, + std::string const & counterName, + size_t returnParamIndex, + size_t templateParamIndex, + bool twoStep, + bool singular ); +std::string appendFunctionBodyEnhancedLocalReturnVariableSingular( std::string & str, + std::string const & indentation, + std::string const & returnName, + std::string const & typeName, + bool isStructureChain ); +void appendReinterpretCast( std::string & str, + bool leadingConst, + std::string const & type, + bool trailingPointerToConst ); +void appendTypesafeStuff( std::string & str, std::string const & typesafeCheck ); +void appendVersionCheck( std::string & str, std::string const & version ); +bool beginsWith( std::string const & text, std::string const & prefix ); +bool endsWith( std::string const & text, std::string const & postfix ); +void check( bool condition, int line, std::string const & message ); +void checkAttributes( int line, + std::map const & attributes, + std::map> const & required, + std::map> const & optional ); +void checkElements( int line, + std::vector const & elements, + std::map const & required, + std::set const & optional = {} ); +void cleanup( std::stringstream & ss ); +std::string constructArraySize( std::vector const & sizes ); +std::string constructStandardArray( std::string const & type, std::vector const & sizes ); +std::string createEnumValueName( std::string const & name, + std::string const & prefix, + std::string const & postfix, + bool bitmask, + std::string const & tag ); +std::string createSuccessCode( std::string const & code, std::set const & tags ); +std::string determineCommandName( std::string const & vulkanCommandName, std::string const & firstArgumentType ); +std::set determineSkippedParams( size_t returnParamIndex, std::map const & vectorParamIndices ); +bool determineStructureChaining( std::string const & structType, + std::set const & extendedStructs, + std::map const & structureAliases ); +std::string extractTag( int line, std::string const & name, std::set const & tags ); +std::string findTag( std::set const & tags, std::string const & name, std::string const & postfix = "" ); +std::pair generateFunctionBodyStandardReturn( std::string const & returnType ); +std::map getAttributes( tinyxml2::XMLElement const * element ); +template +std::vector getChildElements( ElementContainer const * element ); +std::string getEnumPostfix( std::string const & name, std::set const & tags, std::string & prefix ); +std::string getEnumPrefix( int line, std::string const & name, bool bitmask ); +std::string readTypePostfix( tinyxml2::XMLNode const * node ); +std::string readTypePrefix( tinyxml2::XMLNode const * node ); +std::string replaceWithMap( std::string const & input, std::map replacements ); +std::string startLowerCase( std::string const & input ); +std::string startUpperCase( std::string const & input ); +std::string stripPostfix( std::string const & value, std::string const & postfix ); +std::string stripPluralS( std::string const & name ); +std::string stripPrefix( std::string const & value, std::string 
const & prefix ); +std::string toCamelCase( std::string const & value ); +std::string toUpperCase( std::string const & name ); +std::vector tokenize( std::string const & tokenString, std::string const & separator ); +std::string trim( std::string const & input ); +std::string trimEnd( std::string const & input ); +void warn( bool condition, int line, std::string const & message ); const std::set nonConstSTypeStructs = { "VkBaseInStructure", "VkBaseOutStructure" }; -void appendArgumentCount(std::string & str, size_t vectorIndex, std::string const& vectorName, std::string const& counterName, size_t returnParamIndex, size_t templateParamIndex, bool twoStep, bool singular) +void appendArgumentCount( std::string & str, + size_t vectorIndex, + std::string const & vectorName, + std::string const & counterName, + size_t returnParamIndex, + size_t templateParamIndex, + bool twoStep, + bool singular ) { // this parameter is a count parameter for a vector parameter - if ((returnParamIndex == vectorIndex) && twoStep) + if ( ( returnParamIndex == vectorIndex ) && twoStep ) { // the corresponding vector parameter is the return parameter and it's a two-step algorithm // -> use the pointer to a local variable named like the counter parameter without leading 'p' - assert((counterName[0] == 'p') && isupper(counterName[1])); - str += "&" + startLowerCase(stripPrefix(counterName, "p")); + assert( ( counterName[0] == 'p' ) && isupper( counterName[1] ) ); + str += "&" + startLowerCase( stripPrefix( counterName, "p" ) ); } else { // the corresponding vector parameter is not the return parameter, or it's not a two-step algorithm - if (singular) + if ( singular ) { // for the singular version, the count is just 1. str += "1 "; @@ -86,10 +120,10 @@ void appendArgumentCount(std::string & str, size_t vectorIndex, std::string cons { // for the non-singular version, the count is the size of the vector parameter // -> use the vector parameter name without leading 'p' to get the size (in number of elements, not in bytes) - assert(vectorName[0] == 'p'); - str += startLowerCase(stripPrefix(vectorName, "p")) + ".size() "; + assert( vectorName[0] == 'p' ); + str += startLowerCase( stripPrefix( vectorName, "p" ) ) + ".size() "; } - if (templateParamIndex == vectorIndex) + if ( templateParamIndex == vectorIndex ) { // if the vector parameter is templatized -> multiply by the size of that type to get the size in bytes str += "* sizeof( T ) "; @@ -97,15 +131,18 @@ void appendArgumentCount(std::string & str, size_t vectorIndex, std::string cons } } -std::string appendFunctionBodyEnhancedLocalReturnVariableSingular(std::string & str, std::string const& indentation, std::string const& returnName, std::string const& typeName, bool isStructureChain) +std::string appendFunctionBodyEnhancedLocalReturnVariableSingular( std::string & str, + std::string const & indentation, + std::string const & returnName, + std::string const & typeName, + bool isStructureChain ) { - std::string strippedReturnName = stripPluralS(returnName); - if (isStructureChain) + std::string strippedReturnName = stripPluralS( returnName ); + if ( isStructureChain ) { // For StructureChains use the template parameters - str += - "StructureChain structureChain;\n" + - indentation + " " + typeName + "& " + strippedReturnName + " = structureChain.template get<" + typeName + ">()"; + str += "StructureChain structureChain;\n" + indentation + " " + typeName + "& " + strippedReturnName + + " = structureChain.template get<" + typeName + ">()"; strippedReturnName = 
"structureChain"; } else @@ -116,53 +153,60 @@ std::string appendFunctionBodyEnhancedLocalReturnVariableSingular(std::string & return strippedReturnName; } -void appendReinterpretCast(std::string & str, bool leadingConst, std::string const& type, bool trailingPointerToConst) +void appendReinterpretCast( std::string & str, + bool leadingConst, + std::string const & type, + bool trailingPointerToConst ) { str += "reinterpret_cast<"; - if (leadingConst) + if ( leadingConst ) { str += "const "; } str += type; - if (trailingPointerToConst) + if ( trailingPointerToConst ) { str += "* const"; } str += "*>"; } -void appendTypesafeStuff(std::string & str, std::string const& typesafeCheck) +void appendTypesafeStuff( std::string & str, std::string const & typesafeCheck ) { - str += "// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.\n" - "// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION\n" - + typesafeCheck + "\n" + str += + "// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.\n" + "// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION\n" + + typesafeCheck + + "\n" "# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )\n" "# define VULKAN_HPP_TYPESAFE_CONVERSION\n" "# endif\n" "#endif\n"; } -void appendVersionCheck(std::string & str, std::string const& version) +void appendVersionCheck( std::string & str, std::string const & version ) { - str += "static_assert( VK_HEADER_VERSION == " + version + " , \"Wrong VK_HEADER_VERSION!\" );\n" - "\n"; + str += "static_assert( VK_HEADER_VERSION == " + version + + " , \"Wrong VK_HEADER_VERSION!\" );\n" + "\n"; } -bool beginsWith(std::string const& text, std::string const& prefix) +bool beginsWith( std::string const & text, std::string const & prefix ) { - return prefix.empty() || text.substr(0, prefix.length()) == prefix; + return prefix.empty() || text.substr( 0, prefix.length() ) == prefix; } -bool endsWith(std::string const& text, std::string const& postfix) +bool endsWith( std::string const & text, std::string const & postfix ) { - return postfix.empty() || ((postfix.length() <= text.length()) && (text.substr(text.length() - postfix.length()) == postfix)); + return postfix.empty() || + ( ( postfix.length() <= text.length() ) && ( text.substr( text.length() - postfix.length() ) == postfix ) ); } -void check(bool condition, int line, std::string const& message) +void check( bool condition, int line, std::string const & message ) { - if (!condition) + if ( !condition ) { - throw std::runtime_error("Spec error on line " + std::to_string(line) + ": " + message); + throw std::runtime_error( "Spec error on line " + std::to_string( line ) + ": " + message ); } } @@ -171,109 +215,123 @@ void check(bool condition, int line, std::string const& message) // attributes : the map of name/value pairs of the encountered attributes // required : the required attributes, with a set of allowed values per attribute // optional : the optional attributes, with a set of allowed values per attribute -void checkAttributes(int line, std::map const& attributes, std::map> const& required, std::map> const& optional) +void checkAttributes( int line, + std::map const & attributes, + std::map> const & required, + std::map> const & optional ) { - // check if all required attributes are included and if there is a set of allowed values, check if the actual value is part of that set - for (auto const& r : 
required) + // check if all required attributes are included and if there is a set of allowed values, check if the actual value is + // part of that set + for ( auto const & r : required ) { - auto attributesIt = attributes.find(r.first); - check(attributesIt != attributes.end(), line, "missing attribute <" + r.first + ">"); - check(r.second.empty() || (r.second.find(attributesIt->second) != r.second.end()), line, "unexpected attribute value <" + attributesIt->second + "> in attribute <" + r.first + ">"); + auto attributesIt = attributes.find( r.first ); + check( attributesIt != attributes.end(), line, "missing attribute <" + r.first + ">" ); + check( r.second.empty() || ( r.second.find( attributesIt->second ) != r.second.end() ), + line, + "unexpected attribute value <" + attributesIt->second + "> in attribute <" + r.first + ">" ); } - // check if all not required attributes or optional, and if there is a set of allowed values, check if the actual value is part of that set - for (auto const& a : attributes) + // check if all not required attributes or optional, and if there is a set of allowed values, check if the actual + // value is part of that set + for ( auto const & a : attributes ) { - if (required.find(a.first) == required.end()) + if ( required.find( a.first ) == required.end() ) { - auto optionalIt = optional.find(a.first); - if (optionalIt == optional.end()) + auto optionalIt = optional.find( a.first ); + if ( optionalIt == optional.end() ) { - warn(false, line, "unknown attribute <" + a.first + ">"); + warn( false, line, "unknown attribute <" + a.first + ">" ); continue; } - if (!optionalIt->second.empty()) + if ( !optionalIt->second.empty() ) { - std::vector values = tokenize(a.second, ","); - for (auto const& v : values) + std::vector values = tokenize( a.second, "," ); + for ( auto const & v : values ) { - warn(optionalIt->second.find(v) != optionalIt->second.end(), line, "unexpected attribute value <" + v + "> in attribute <" + a.first + ">"); + warn( optionalIt->second.find( v ) != optionalIt->second.end(), + line, + "unexpected attribute value <" + v + "> in attribute <" + a.first + ">" ); } } } } } -void checkElements(int line, std::vector const& elements, std::map const& required, std::set const& optional) +void checkElements( int line, + std::vector const & elements, + std::map const & required, + std::set const & optional ) { std::map encountered; - for (auto const& e : elements) + for ( auto const & e : elements ) { std::string value = e->Value(); encountered[value]++; - warn((required.find(value) != required.end()) || (optional.find(value) != optional.end()), e->GetLineNum(), "unknown element <" + value + ">"); + warn( ( required.find( value ) != required.end() ) || ( optional.find( value ) != optional.end() ), + e->GetLineNum(), + "unknown element <" + value + ">" ); } - for (auto const& r : required) + for ( auto const & r : required ) { - auto encounteredIt = encountered.find(r.first); - check(encounteredIt != encountered.end(), line, "missing required element <" + r.first + ">"); + auto encounteredIt = encountered.find( r.first ); + check( encounteredIt != encountered.end(), line, "missing required element <" + r.first + ">" ); // check: r.second (means: required excactly once) => (encouteredIt->second == 1) - check(!r.second || (encounteredIt->second == 1), line, "required element <" + r.first + "> is supposed to be listed exactly once, but is listed " + std::to_string(encounteredIt->second)); + check( !r.second || ( encounteredIt->second == 1 ), + line, + 
"required element <" + r.first + "> is supposed to be listed exactly once, but is listed " + + std::to_string( encounteredIt->second ) ); } } -void cleanup(std::string &str) +void cleanup( std::string & str ) { - std::map replacements = + std::map replacements = { { "\n\n\n", "\n\n" }, + { "{\n\n", "{\n" }, + { "\n\n }", "\n }" } }; + for ( auto const & repl : replacements ) { - { "\n\n\n", "\n\n" }, - { "{\n\n", "{\n" }, - { "\n\n }", "\n }" } - }; - for (auto const& repl : replacements) - { - std::string::size_type pos = str.find(repl.first); - while (pos != std::string::npos) + std::string::size_type pos = str.find( repl.first ); + while ( pos != std::string::npos ) { - str.replace(pos, repl.first.length(), repl.second); - pos = str.find(repl.first, pos); + str.replace( pos, repl.first.length(), repl.second ); + pos = str.find( repl.first, pos ); } } } -std::string constructArraySize(std::vector const& sizes) +std::string constructArraySize( std::vector const & sizes ) { std::string arraySize; - for (auto const& s : sizes) + for ( auto const & s : sizes ) { arraySize += s + " * "; } - return arraySize.substr(0, arraySize.length() - 3); + return arraySize.substr( 0, arraySize.length() - 3 ); } -std::string constructCArraySizes(std::vector const& sizes) +std::string constructCArraySizes( std::vector const & sizes ) { std::string arraySizes; - for (auto const& s : sizes) + for ( auto const & s : sizes ) { arraySizes += "[" + s + "]"; } return arraySizes; } -std::string constructStandardArray(std::string const& type, std::vector const& sizes) +std::string constructStandardArray( std::string const & type, std::vector const & sizes ) { std::string arrayString = "std::array<" + type + "," + sizes.back() + ">"; - for (size_t i = sizes.size() - 2; i < sizes.size(); i--) + for ( size_t i = sizes.size() - 2; i < sizes.size(); i-- ) { arrayString = "std::array<" + arrayString + "," + sizes[i] + ">"; } return arrayString; } -std::string constructStandardArrayWrapper(std::string const& type, std::vector const& sizes) +std::string constructStandardArrayWrapper( std::string const & type, std::vector const & sizes ) { - std::string arrayString = "VULKAN_HPP_NAMESPACE::ArrayWrapper" + std::to_string(sizes.size()) + "D<" + type; - for (auto const& size : sizes) + std::string arrayString = "VULKAN_HPP_NAMESPACE::ArrayWrapper" + std::to_string( sizes.size() ) + "D<" + type; + for ( auto const & size : sizes ) { arrayString += ", " + size; } @@ -281,148 +339,160 @@ std::string constructStandardArrayWrapper(std::string const& type, std::vector const& tags) +std::string createSuccessCode( std::string const & code, std::set const & tags ) { - std::string tag = findTag(tags, code); + std::string tag = findTag( tags, code ); // on each success code: prepend 'e', strip "VK_" and a tag, convert it to camel case, and add the tag again - return "e" + toCamelCase(stripPostfix(stripPrefix(code, "VK_"), tag)) + tag; + return "e" + toCamelCase( stripPostfix( stripPrefix( code, "VK_" ), tag ) ) + tag; } -std::string determineCommandName(std::string const& vulkanCommandName, std::string const& firstArgumentType) +std::string determineCommandName( std::string const & vulkanCommandName, std::string const & firstArgumentType ) { - std::string commandName(startLowerCase(stripPrefix(vulkanCommandName, "vk"))); - std::string searchName = stripPrefix(firstArgumentType, "Vk"); - size_t pos = commandName.find(searchName); - if ((pos == std::string::npos) && isupper(searchName[0])) + std::string commandName( startLowerCase( 
stripPrefix( vulkanCommandName, "vk" ) ) ); + std::string searchName = stripPrefix( firstArgumentType, "Vk" ); + size_t pos = commandName.find( searchName ); + if ( ( pos == std::string::npos ) && isupper( searchName[0] ) ) { - searchName[0] = static_cast(tolower(searchName[0])); - pos = commandName.find(searchName); + searchName[0] = static_cast( tolower( searchName[0] ) ); + pos = commandName.find( searchName ); } - if (pos != std::string::npos) + if ( pos != std::string::npos ) { - commandName.erase(pos, searchName.length()); + commandName.erase( pos, searchName.length() ); } - else if ((searchName == "commandBuffer") && beginsWith(commandName, "cmd")) + else if ( ( searchName == "commandBuffer" ) && beginsWith( commandName, "cmd" ) ) { - commandName.erase(0, 3); + commandName.erase( 0, 3 ); pos = 0; } - if ((pos == 0) && isupper(commandName[0])) + if ( ( pos == 0 ) && isupper( commandName[0] ) ) { - commandName[0] = static_cast(tolower(commandName[0])); + commandName[0] = static_cast( tolower( commandName[0] ) ); } return commandName; } -std::set determineSkippedParams(size_t returnParamIndex, std::map const& vectorParamIndices) +std::set determineSkippedParams( size_t returnParamIndex, std::map const & vectorParamIndices ) { std::set skippedParams; // the size-parameters of vector parameters are not explicitly used in the enhanced API - std::for_each(vectorParamIndices.begin(), vectorParamIndices.end(), [&skippedParams](std::pair const& vp) { if (vp.second != INVALID_INDEX) skippedParams.insert(vp.second); }); + std::for_each( + vectorParamIndices.begin(), vectorParamIndices.end(), [&skippedParams]( std::pair const & vp ) { + if ( vp.second != INVALID_INDEX ) + skippedParams.insert( vp.second ); + } ); // and the return parameter is also skipped - if (returnParamIndex != INVALID_INDEX) + if ( returnParamIndex != INVALID_INDEX ) { - skippedParams.insert(returnParamIndex); + skippedParams.insert( returnParamIndex ); } return skippedParams; } -bool determineStructureChaining(std::string const& structType, std::set const& extendedStructs, std::map const& structureAliases) +bool determineStructureChaining( std::string const & structType, + std::set const & extendedStructs, + std::map const & structureAliases ) { - bool isStructureChained = (extendedStructs.find(structType) != extendedStructs.end()); - if (!isStructureChained) + bool isStructureChained = ( extendedStructs.find( structType ) != extendedStructs.end() ); + if ( !isStructureChained ) { - auto aliasIt = structureAliases.find(structType); - if ((aliasIt != structureAliases.end())) + auto aliasIt = structureAliases.find( structType ); + if ( ( aliasIt != structureAliases.end() ) ) { - isStructureChained = (extendedStructs.find(aliasIt->second) != extendedStructs.end()); + isStructureChained = ( extendedStructs.find( aliasIt->second ) != extendedStructs.end() ); } } return isStructureChained; } -std::string findTag(std::set const& tags, std::string const& name, std::string const& postfix) +std::string findTag( std::set const & tags, std::string const & name, std::string const & postfix ) { - auto tagIt = std::find_if(tags.begin(), tags.end(), [&name, &postfix](std::string const& t) { return endsWith(name, t + postfix); }); - return (tagIt != tags.end()) ? *tagIt : ""; + auto tagIt = std::find_if( + tags.begin(), tags.end(), [&name, &postfix]( std::string const & t ) { return endsWith( name, t + postfix ); } ); + return ( tagIt != tags.end() ) ? 
*tagIt : ""; } -std::pair generateFunctionBodyStandardReturn(std::string const& returnType) +std::pair generateFunctionBodyStandardReturn( std::string const & returnType ) { - bool castReturn = false; + bool castReturn = false; std::string ret; - if (returnType != "void") + if ( returnType != "void" ) { // there's something to return... ret = "return "; - castReturn = beginsWith(returnType, "Vk"); - if (castReturn) + castReturn = beginsWith( returnType, "Vk" ); + if ( castReturn ) { // the return-type is a vulkan type -> need to cast to VULKAN_HPP_NAMESPACE-type - ret += "static_cast<" + stripPrefix(returnType, "Vk") + ">( "; + ret += "static_cast<" + stripPrefix( returnType, "Vk" ) + ">( "; } } - return std::make_pair(castReturn, ret); + return std::make_pair( castReturn, ret ); } -std::map getAttributes(tinyxml2::XMLElement const* element) +std::map getAttributes( tinyxml2::XMLElement const * element ) { std::map attributes; - for (auto attribute = element->FirstAttribute(); attribute; attribute = attribute->Next()) + for ( auto attribute = element->FirstAttribute(); attribute; attribute = attribute->Next() ) { - assert(attributes.find(attribute->Name()) == attributes.end()); + assert( attributes.find( attribute->Name() ) == attributes.end() ); attributes[attribute->Name()] = attribute->Value(); } return attributes; } template -std::vector getChildElements(ElementContainer const* element) +std::vector getChildElements( ElementContainer const * element ) { - std::vector childElements; - for (tinyxml2::XMLElement const* childElement = element->FirstChildElement(); childElement; childElement = childElement->NextSiblingElement()) + std::vector childElements; + for ( tinyxml2::XMLElement const * childElement = element->FirstChildElement(); childElement; + childElement = childElement->NextSiblingElement() ) { - childElements.push_back(childElement); + childElements.push_back( childElement ); } return childElements; } -std::string getEnumPostfix(std::string const& name, std::set const& tags, std::string & prefix) +std::string getEnumPostfix( std::string const & name, std::set const & tags, std::string & prefix ) { std::string postfix; - if (name != "VkResult") + if ( name != "VkResult" ) { // if the enum name contains a tag move it from the prefix to the postfix to generate correct enum value names. 
- for (auto const& tag : tags) + for ( auto const & tag : tags ) { - if (endsWith(prefix, tag + "_")) + if ( endsWith( prefix, tag + "_" ) ) { - prefix.erase(prefix.length() - tag.length() - 1); + prefix.erase( prefix.length() - tag.length() - 1 ); postfix = "_" + tag; break; } - else if (endsWith(name, tag)) + else if ( endsWith( name, tag ) ) { postfix = "_" + tag; break; @@ -432,120 +502,122 @@ std::string getEnumPostfix(std::string const& name, std::set const& return postfix; } -std::string getEnumPrefix(int line, std::string const& name, bool bitmask) +std::string getEnumPrefix( int line, std::string const & name, bool bitmask ) { std::string prefix; - if (name == "VkResult") + if ( name == "VkResult" ) { prefix = "VK_"; } - else if (bitmask) + else if ( bitmask ) { // for a bitmask enum, start with "VK", cut off the trailing "FlagBits", and convert that name to upper case // end that with "Bit" - size_t pos = name.find("FlagBits"); - check(pos != std::string::npos, line, "bitmask <" + name + "> does not contain "); - prefix = toUpperCase(name.substr(0, pos)) + "_"; + size_t pos = name.find( "FlagBits" ); + check( pos != std::string::npos, line, "bitmask <" + name + "> does not contain " ); + prefix = toUpperCase( name.substr( 0, pos ) ) + "_"; } else { // for a non-bitmask enum, convert the name to upper case - prefix = toUpperCase(name) + "_"; + prefix = toUpperCase( name ) + "_"; } return prefix; } -std::string extractTag(int line, std::string const& name, std::set const& tags) +std::string extractTag( int line, std::string const & name, std::set const & tags ) { // extract the tag from the name, which is supposed to look like VK__ - size_t tagStart = name.find('_'); - check(tagStart != std::string::npos, line, "name <" + name + "> is missing an underscore '_'"); - size_t tagEnd = name.find('_', tagStart + 1); - check(tagEnd != std::string::npos, line, "name <" + name + "> is missing an underscore '_'"); - std::string tag = name.substr(tagStart + 1, tagEnd - tagStart - 1); - check(tags.find(tag) != tags.end(), line, "name <" + name + "> is using an unknown tag <" + tag + ">"); + size_t tagStart = name.find( '_' ); + check( tagStart != std::string::npos, line, "name <" + name + "> is missing an underscore '_'" ); + size_t tagEnd = name.find( '_', tagStart + 1 ); + check( tagEnd != std::string::npos, line, "name <" + name + "> is missing an underscore '_'" ); + std::string tag = name.substr( tagStart + 1, tagEnd - tagStart - 1 ); + check( tags.find( tag ) != tags.end(), line, "name <" + name + "> is using an unknown tag <" + tag + ">" ); return tag; } -std::pair, std::string> readModifiers(tinyxml2::XMLNode const* node) +std::pair, std::string> readModifiers( tinyxml2::XMLNode const * node ) { std::vector arraySizes; - std::string bitCount; - if (node && node->ToText()) + std::string bitCount; + if ( node && node->ToText() ) { // following the name there might be some array size std::string value = node->Value(); - assert(!value.empty()); - if (value[0] == '[') + assert( !value.empty() ); + if ( value[0] == '[' ) { std::string::size_type endPos = 0; - while (endPos + 1 != value.length()) + while ( endPos + 1 != value.length() ) { - std::string::size_type startPos = value.find('[', endPos); - check(startPos != std::string::npos, node->GetLineNum(), "could not find '[' in <" + value + ">"); - endPos = value.find(']', startPos); - check(endPos != std::string::npos, node->GetLineNum(), "could not find ']' in <" + value + ">"); - check(startPos + 2 <= endPos, node->GetLineNum(), "missing 
content between '[' and ']' in <" + value + ">"); - arraySizes.push_back(value.substr(startPos + 1, endPos - startPos - 1)); + std::string::size_type startPos = value.find( '[', endPos ); + check( startPos != std::string::npos, node->GetLineNum(), "could not find '[' in <" + value + ">" ); + endPos = value.find( ']', startPos ); + check( endPos != std::string::npos, node->GetLineNum(), "could not find ']' in <" + value + ">" ); + check( startPos + 2 <= endPos, node->GetLineNum(), "missing content between '[' and ']' in <" + value + ">" ); + arraySizes.push_back( value.substr( startPos + 1, endPos - startPos - 1 ) ); } } - else if (value[0] == ':') + else if ( value[0] == ':' ) { - bitCount = value.substr(1); + bitCount = value.substr( 1 ); } else { - check((value[0] == ';') || (value[0] == ')'), node->GetLineNum(), "unknown modifier <" + value + ">"); + check( ( value[0] == ';' ) || ( value[0] == ')' ), node->GetLineNum(), "unknown modifier <" + value + ">" ); } } - return std::make_pair(arraySizes, bitCount);; + return std::make_pair( arraySizes, bitCount ); + ; } -std::string readTypePostfix(tinyxml2::XMLNode const* node) +std::string readTypePostfix( tinyxml2::XMLNode const * node ) { std::string postfix; - if (node && node->ToText()) + if ( node && node->ToText() ) { - postfix = trimEnd(node->Value()); + postfix = trimEnd( node->Value() ); } return postfix; } -std::string readTypePrefix(tinyxml2::XMLNode const* node) +std::string readTypePrefix( tinyxml2::XMLNode const * node ) { std::string prefix; - if (node && node->ToText()) + if ( node && node->ToText() ) { - prefix = trim(node->Value()); + prefix = trim( node->Value() ); } return prefix; } -std::string replaceWithMap(std::string const &input, std::map replacements) +std::string replaceWithMap( std::string const & input, std::map replacements ) { // This will match ${someVariable} and contain someVariable in match group 1 - std::regex re(R"(\$\{([^\}]+)\})"); - auto it = std::sregex_iterator(input.begin(), input.end(), re); - auto end = std::sregex_iterator(); + std::regex re( R"(\$\{([^\}]+)\})" ); + auto it = std::sregex_iterator( input.begin(), input.end(), re ); + auto end = std::sregex_iterator(); // No match, just return the original string - if (it == end) + if ( it == end ) { return input; } std::string result = ""; - while (it != end) + while ( it != end ) { - std::smatch match = *it; - auto itReplacement = replacements.find(match[1].str()); - assert(itReplacement != replacements.end()); + std::smatch match = *it; + auto itReplacement = replacements.find( match[1].str() ); + assert( itReplacement != replacements.end() ); - result += match.prefix().str() + ((itReplacement != replacements.end()) ? itReplacement->second : match[0].str()); + result += + match.prefix().str() + ( ( itReplacement != replacements.end() ) ? itReplacement->second : match[0].str() ); ++it; // we've passed the last match. Append the rest of the orignal string - if (it == end) + if ( it == end ) { result += match.suffix().str(); } @@ -553,176 +625,182 @@ std::string replaceWithMap(std::string const &input, std::map(tolower(input[0])) + input.substr(1); + return input.empty() ? "" : static_cast( tolower( input[0] ) ) + input.substr( 1 ); } -std::string startUpperCase(std::string const& input) +std::string startUpperCase( std::string const & input ) { - return input.empty() ? "" : static_cast(toupper(input[0])) + input.substr(1); + return input.empty() ? 
"" : static_cast( toupper( input[0] ) ) + input.substr( 1 ); } -std::string stripPostfix(std::string const& value, std::string const& postfix) +std::string stripPostfix( std::string const & value, std::string const & postfix ) { std::string strippedValue = value; - if (endsWith(strippedValue, postfix)) + if ( endsWith( strippedValue, postfix ) ) { - strippedValue.erase(strippedValue.length() - postfix.length()); + strippedValue.erase( strippedValue.length() - postfix.length() ); } return strippedValue; } -std::string stripPluralS(std::string const& name) +std::string stripPluralS( std::string const & name ) { - std::string strippedName(name); - size_t pos = strippedName.rfind('s'); - assert(pos != std::string::npos); - strippedName.erase(pos, 1); + std::string strippedName( name ); + size_t pos = strippedName.rfind( 's' ); + assert( pos != std::string::npos ); + strippedName.erase( pos, 1 ); return strippedName; } -std::string stripPrefix(std::string const& value, std::string const& prefix) +std::string stripPrefix( std::string const & value, std::string const & prefix ) { std::string strippedValue = value; - if (beginsWith(strippedValue, prefix)) + if ( beginsWith( strippedValue, prefix ) ) { - strippedValue.erase(0, prefix.length()); + strippedValue.erase( 0, prefix.length() ); } return strippedValue; } -std::string toCamelCase(std::string const& value) +std::string toCamelCase( std::string const & value ) { - assert(!value.empty() && (isupper(value[0]) || isdigit(value[0]))); + assert( !value.empty() && ( isupper( value[0] ) || isdigit( value[0] ) ) ); std::string result; - result.reserve(value.size()); + result.reserve( value.size() ); bool keepUpper = true; - for (auto c : value) + for ( auto c : value ) { - if (c == '_') + if ( c == '_' ) { keepUpper = true; } - else if (isdigit(c)) + else if ( isdigit( c ) ) { keepUpper = true; - result.push_back(c); + result.push_back( c ); } - else if (keepUpper) + else if ( keepUpper ) { - result.push_back(c); + result.push_back( c ); keepUpper = false; } else { - result.push_back(static_cast(tolower(c))); + result.push_back( static_cast( tolower( c ) ) ); } } return result; } -std::string toUpperCase(std::string const& name) +std::string toUpperCase( std::string const & name ) { std::string convertedName; - convertedName.reserve(name.size()); + convertedName.reserve( name.size() ); bool lowerOrDigit = false; - for (auto c : name) + for ( auto c : name ) { - if (islower(c) || isdigit(c)) + if ( islower( c ) || isdigit( c ) ) { lowerOrDigit = true; } - else if (lowerOrDigit) + else if ( lowerOrDigit ) { - convertedName.push_back('_'); + convertedName.push_back( '_' ); lowerOrDigit = false; } - convertedName.push_back(static_cast(toupper(c))); + convertedName.push_back( static_cast( toupper( c ) ) ); } return convertedName; } -std::vector tokenize(std::string const& tokenString, std::string const& separator) +std::vector tokenize( std::string const & tokenString, std::string const & separator ) { std::vector tokens; - size_t start = 0, end; + size_t start = 0, end; do { - end = tokenString.find(separator, start); - if (start != end) + end = tokenString.find( separator, start ); + if ( start != end ) { - tokens.push_back(tokenString.substr(start, end - start)); + tokens.push_back( tokenString.substr( start, end - start ) ); } start = end + separator.length(); - } while (end != std::string::npos); + } while ( end != std::string::npos ); return tokens; } -std::string trim(std::string const& input) +std::string trim( std::string const & input ) { 
std::string result = input; - result.erase(result.begin(), std::find_if(result.begin(), result.end(), [](char c) { return !std::isspace(c); })); - result.erase(std::find_if(result.rbegin(), result.rend(), [](char c) { return !std::isspace(c); }).base(), result.end()); + result.erase( result.begin(), + std::find_if( result.begin(), result.end(), []( char c ) { return !std::isspace( c ); } ) ); + result.erase( std::find_if( result.rbegin(), result.rend(), []( char c ) { return !std::isspace( c ); } ).base(), + result.end() ); return result; } -std::string trimEnd(std::string const& input) +std::string trimEnd( std::string const & input ) { std::string result = input; - result.erase(std::find_if(result.rbegin(), result.rend(), [](char c) { return !std::isspace(c); }).base(), result.end()); + result.erase( std::find_if( result.rbegin(), result.rend(), []( char c ) { return !std::isspace( c ); } ).base(), + result.end() ); return result; } -void warn(bool condition, int line, std::string const& message) +void warn( bool condition, int line, std::string const & message ) { - if (!condition) + if ( !condition ) { - std::cerr << "Spec warning on line " << std::to_string(line) << " " << message << "!" << std::endl; + std::cerr << "Spec warning on line " << std::to_string( line ) << " " << message << "!" << std::endl; } } -VulkanHppGenerator::VulkanHppGenerator(tinyxml2::XMLDocument const& document) +VulkanHppGenerator::VulkanHppGenerator( tinyxml2::XMLDocument const & document ) { - m_handles.insert(std::make_pair("", HandleData({}, 0))); // insert the default "handle" without class (for createInstance, and such) + m_handles.insert( std::make_pair( + "", HandleData( {}, 0 ) ) ); // insert the default "handle" without class (for createInstance, and such) - int line = document.GetLineNum(); - std::vector elements = getChildElements(&document); - checkElements(line, elements, { { "registry", true } }); - check(elements.size() == 1, line, "encountered " + std::to_string(elements.size()) + " elments named but only one is allowed"); - readRegistry(elements[0]); + int line = document.GetLineNum(); + std::vector elements = getChildElements( &document ); + checkElements( line, elements, { { "registry", true } } ); + check( elements.size() == 1, + line, + "encountered " + std::to_string( elements.size() ) + " elments named but only one is allowed" ); + readRegistry( elements[0] ); checkCorrectness(); } -void VulkanHppGenerator::appendArgumentPlainType(std::string & str, ParamData const& paramData) const +void VulkanHppGenerator::appendArgumentPlainType( std::string & str, ParamData const & paramData ) const { // this parameter is just a plain type - if (!paramData.type.postfix.empty()) + if ( !paramData.type.postfix.empty() ) { - assert(paramData.type.postfix.back() == '*'); + assert( paramData.type.postfix.back() == '*' ); // it's a pointer - std::string parameterName = startLowerCase(stripPrefix(paramData.name, "p")); - if (paramData.type.prefix.find("const") != std::string::npos) + std::string parameterName = startLowerCase( stripPrefix( paramData.name, "p" ) ); + if ( paramData.type.prefix.find( "const" ) != std::string::npos ) { // it's a const pointer - if (paramData.type.type == "char") + if ( paramData.type.type == "char" ) { // it's a const pointer to char -> it's a string -> get the data via c_str() - str += parameterName + (paramData.optional ? (" ? " + parameterName + "->c_str() : nullptr") : ".c_str()"); + str += parameterName + ( paramData.optional ? ( " ? 
" + parameterName + "->c_str() : nullptr" ) : ".c_str()" ); } else { // it's const pointer to something else -> just use the name - assert(!paramData.optional); + assert( !paramData.optional ); str += paramData.name; } } else { // it's a non-const pointer, and char is the only type that occurs -> use the address of the parameter - assert(paramData.type.type.find("char") == std::string::npos); + assert( paramData.type.type.find( "char" ) == std::string::npos ); str += "&" + parameterName; } } @@ -733,68 +811,103 @@ void VulkanHppGenerator::appendArgumentPlainType(std::string & str, ParamData co } } -void VulkanHppGenerator::appendArguments(std::string & str, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool twoStep, bool firstCall, bool singular, size_t from, size_t to) const +void VulkanHppGenerator::appendArguments( std::string & str, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool twoStep, + bool firstCall, + bool singular, + size_t from, + size_t to ) const { - assert(from <= to); + assert( from <= to ); bool encounteredArgument = false; - for (size_t i = from; i < to; i++) + for ( size_t i = from; i < to; i++ ) { - if (encounteredArgument) + if ( encounteredArgument ) { str += ", "; } - auto it = vectorParamIndices.find(i); - if (it != vectorParamIndices.end()) + auto it = vectorParamIndices.find( i ); + if ( it != vectorParamIndices.end() ) { - appendArgumentVector(str, it->first, commandData.params[it->first], returnParamIndex, templateParamIndex, twoStep, firstCall, singular); + appendArgumentVector( str, + it->first, + commandData.params[it->first], + returnParamIndex, + templateParamIndex, + twoStep, + firstCall, + singular ); } else { - it = find_if(vectorParamIndices.begin(), vectorParamIndices.end(), [i](std::pair const& vpi) { return vpi.second == i; }); - if (it != vectorParamIndices.end()) + it = find_if( vectorParamIndices.begin(), vectorParamIndices.end(), [i]( std::pair const & vpi ) { + return vpi.second == i; + } ); + if ( it != vectorParamIndices.end() ) { - appendArgumentCount(str, it->first, commandData.params[it->first].name, commandData.params[it->second].name, returnParamIndex, templateParamIndex, twoStep, singular); + appendArgumentCount( str, + it->first, + commandData.params[it->first].name, + commandData.params[it->second].name, + returnParamIndex, + templateParamIndex, + twoStep, + singular ); } - else if (beginsWith(commandData.params[i].type.type, "Vk")) + else if ( beginsWith( commandData.params[i].type.type, "Vk" ) ) { - appendArgumentVulkanType(str, commandData.params[i]); + appendArgumentVulkanType( str, commandData.params[i] ); } else { - appendArgumentPlainType(str, commandData.params[i]); + appendArgumentPlainType( str, commandData.params[i] ); } } encounteredArgument = true; } } -void VulkanHppGenerator::appendArgumentVector(std::string & str, size_t paramIndex, ParamData const& paramData, size_t returnParamIndex, size_t templateParamIndex, bool twoStep, bool firstCall, bool singular) const +void VulkanHppGenerator::appendArgumentVector( std::string & str, + size_t paramIndex, + ParamData const & paramData, + size_t returnParamIndex, + size_t templateParamIndex, + bool twoStep, + bool firstCall, + bool singular ) const { // this parameter is a vector parameter - assert(paramData.type.postfix.back() == '*'); - if ((returnParamIndex == paramIndex) && twoStep && firstCall) + assert( 
paramData.type.postfix.back() == '*' ); + if ( ( returnParamIndex == paramIndex ) && twoStep && firstCall ) { // this parameter is the return parameter, and it's the first call of a two-step algorithm -> just just nullptr str += "nullptr"; } else { - std::string parameterName = startLowerCase(stripPrefix(paramData.name, "p")); - if (beginsWith(paramData.type.type, "Vk") || (paramIndex == templateParamIndex)) + std::string parameterName = startLowerCase( stripPrefix( paramData.name, "p" ) ); + if ( beginsWith( paramData.type.type, "Vk" ) || ( paramIndex == templateParamIndex ) ) { // CHECK for !commandData.params[it->first].optional // this parameter is a vulkan type or a templated type -> need to reinterpret cast - appendReinterpretCast(str, paramData.type.prefix.find("const") == 0, paramData.type.type, paramData.type.postfix.rfind("* const") != std::string::npos); - str += "( " + (singular ? ("&" + stripPluralS(parameterName)) : (parameterName + ".data()")) + " )"; + appendReinterpretCast( str, + paramData.type.prefix.find( "const" ) == 0, + paramData.type.type, + paramData.type.postfix.rfind( "* const" ) != std::string::npos ); + str += "( " + ( singular ? ( "&" + stripPluralS( parameterName ) ) : ( parameterName + ".data()" ) ) + " )"; } - else if (paramData.type.type == "char") + else if ( paramData.type.type == "char" ) { // the parameter is a vector to char -> it might be optional // besides that, the parameter now is a std::string -> get the pointer via c_str() - str += parameterName + (paramData.optional ? (" ? " + parameterName + "->c_str() : nullptr") : ".c_str()"); + str += parameterName + ( paramData.optional ? ( " ? " + parameterName + "->c_str() : nullptr" ) : ".c_str()" ); } else { @@ -804,25 +917,27 @@ void VulkanHppGenerator::appendArgumentVector(std::string & str, size_t paramInd } } -void VulkanHppGenerator::appendArgumentVulkanType(std::string & str, ParamData const& paramData) const +void VulkanHppGenerator::appendArgumentVulkanType( std::string & str, ParamData const & paramData ) const { // this parameter is a vulkan type - if (!paramData.type.postfix.empty() || !paramData.arraySizes.empty()) + if ( !paramData.type.postfix.empty() || !paramData.arraySizes.empty() ) { - assert((paramData.type.postfix.empty() || (paramData.type.postfix.back() == '*')) && (paramData.arraySizes.empty() || (paramData.arraySizes.size() == 1))); + assert( ( paramData.type.postfix.empty() || ( paramData.type.postfix.back() == '*' ) ) && + ( paramData.arraySizes.empty() || ( paramData.arraySizes.size() == 1 ) ) ); // it's a pointer -> needs a reinterpret cast to the vulkan type - std::string parameterName = startLowerCase(stripPrefix(paramData.name, "p")); - appendReinterpretCast(str, paramData.type.prefix.find("const") != std::string::npos, paramData.type.type, false); + std::string parameterName = startLowerCase( stripPrefix( paramData.name, "p" ) ); + appendReinterpretCast( + str, paramData.type.prefix.find( "const" ) != std::string::npos, paramData.type.type, false ); str += "( "; - if (paramData.optional) + if ( paramData.optional ) { // for an optional parameter, we need also a static_cast from optional type to const-pointer to pure type - str += "static_cast( " + parameterName + " )"; + str += "static_cast( " + parameterName + " )"; } else { // other parameters can just use the pointer - str += (paramData.arraySizes.empty() ? "&" : "") + parameterName; + str += ( paramData.arraySizes.empty() ? 
"&" : "" ) + parameterName; } str += " )"; } @@ -833,51 +948,66 @@ void VulkanHppGenerator::appendArgumentVulkanType(std::string & str, ParamData c } } -void VulkanHppGenerator::appendBaseTypes(std::string & str) const +void VulkanHppGenerator::appendBaseTypes( std::string & str ) const { - assert(!m_baseTypes.empty()); - for (auto const& baseType : m_baseTypes) + assert( !m_baseTypes.empty() ); + for ( auto const & baseType : m_baseTypes ) { - if ((baseType.first != "VkFlags") && (baseType.first != "VkFlags64")) // filter out VkFlags and VkFlags64, as they are mapped to our own Flags class + if ( ( baseType.first != "VkFlags" ) && + ( baseType.first != + "VkFlags64" ) ) // filter out VkFlags and VkFlags64, as they are mapped to our own Flags class { - str += " using " + stripPrefix(baseType.first, "Vk") + " = " + baseType.second.type + ";\n"; + str += " using " + stripPrefix( baseType.first, "Vk" ) + " = " + baseType.second.type + ";\n"; } } } -void VulkanHppGenerator::appendBitmasks(std::string & str) const +void VulkanHppGenerator::appendBitmasks( std::string & str ) const { - for (auto const& bitmask : m_bitmasks) + for ( auto const & bitmask : m_bitmasks ) { - auto bitmaskBits = m_enums.find(bitmask.second.requirements); - bool hasBits = (bitmaskBits != m_enums.end()); - check(bitmask.second.requirements.empty() || hasBits, bitmask.second.xmlLine, "bitmask <" + bitmask.first + "> references the undefined requires <" + bitmask.second.requirements + ">"); + auto bitmaskBits = m_enums.find( bitmask.second.requirements ); + bool hasBits = ( bitmaskBits != m_enums.end() ); + check( bitmask.second.requirements.empty() || hasBits, + bitmask.second.xmlLine, + "bitmask <" + bitmask.first + "> references the undefined requires <" + bitmask.second.requirements + ">" ); - std::string strippedBitmaskName = stripPrefix(bitmask.first, "Vk"); - std::string strippedEnumName = hasBits ? stripPrefix(bitmaskBits->first, "Vk") : ""; + std::string strippedBitmaskName = stripPrefix( bitmask.first, "Vk" ); + std::string strippedEnumName = hasBits ? stripPrefix( bitmaskBits->first, "Vk" ) : ""; str += "\n"; - appendPlatformEnter(str, !bitmask.second.alias.empty(), bitmask.second.platform); - appendBitmask(str, strippedBitmaskName, bitmask.second.type, bitmask.second.alias, strippedEnumName, hasBits ? bitmaskBits->second.values : std::vector()); - appendBitmaskToStringFunction(str, strippedBitmaskName, strippedEnumName, hasBits ? bitmaskBits->second.values : std::vector()); - appendPlatformLeave(str, !bitmask.second.alias.empty(), bitmask.second.platform); + appendPlatformEnter( str, !bitmask.second.alias.empty(), bitmask.second.platform ); + appendBitmask( str, + strippedBitmaskName, + bitmask.second.type, + bitmask.second.alias, + strippedEnumName, + hasBits ? bitmaskBits->second.values : std::vector() ); + appendBitmaskToStringFunction( + str, strippedBitmaskName, strippedEnumName, hasBits ? 
bitmaskBits->second.values : std::vector() ); + appendPlatformLeave( str, !bitmask.second.alias.empty(), bitmask.second.platform ); } } -void VulkanHppGenerator::appendBitmask(std::string & str, std::string const& bitmaskName, std::string const& bitmaskType, std::string const& bitmaskAlias, std::string const& enumName, std::vector const& enumValues) const +void VulkanHppGenerator::appendBitmask( std::string & str, + std::string const & bitmaskName, + std::string const & bitmaskType, + std::string const & bitmaskAlias, + std::string const & enumName, + std::vector const & enumValues ) const { // each Flags class is using the class 'Flags' with the corresponding FlagBits enum as the template parameter // if there's no enum for the FlagBits, introduce an artificial empty one std::string emptyEnumName; - if (enumName.empty()) + if ( enumName.empty() ) { emptyEnumName = bitmaskName; - size_t pos = emptyEnumName.rfind("Flags"); - assert(pos != std::string::npos); - emptyEnumName.replace(pos, 5, "FlagBits"); + size_t pos = emptyEnumName.rfind( "Flags" ); + assert( pos != std::string::npos ); + emptyEnumName.replace( pos, 5, "FlagBits" ); // if this emptyEnumName is not in the list of enums, list it here - if (m_enums.find("Vk" + emptyEnumName) == m_enums.end()) + if ( m_enums.find( "Vk" + emptyEnumName ) == m_enums.end() ) { const std::string templateString = R"x( enum class ${enumName} : ${bitmaskType} {}; @@ -888,19 +1018,21 @@ void VulkanHppGenerator::appendBitmask(std::string & str, std::string const& bit } )x"; - str += replaceWithMap(templateString, { { "enumName", emptyEnumName }, { "bitmaskType", bitmaskType } }); + str += replaceWithMap( templateString, { { "enumName", emptyEnumName }, { "bitmaskType", bitmaskType } } ); } } - std::string name = (enumName.empty() ? emptyEnumName : enumName); - str += "\n" - " using " + bitmaskName + " = Flags<" + name + ">;\n"; + std::string name = ( enumName.empty() ? emptyEnumName : enumName ); + str += + "\n" + " using " + + bitmaskName + " = Flags<" + name + ">;\n"; - if (!enumValues.empty()) + if ( !enumValues.empty() ) { std::string allFlags; - for (auto const& value : enumValues) + for ( auto const & value : enumValues ) { - if (!allFlags.empty()) + if ( !allFlags.empty() ) { allFlags += " | "; } @@ -937,143 +1069,314 @@ void VulkanHppGenerator::appendBitmask(std::string & str, std::string const& bit } )"; - str += replaceWithMap(bitmaskOperatorsTemplate, { { "bitmaskName", bitmaskName }, { "bitmaskType", bitmaskType }, { "enumName", enumName }, { "allFlags", allFlags } }); + str += replaceWithMap( bitmaskOperatorsTemplate, + { { "bitmaskName", bitmaskName }, + { "bitmaskType", bitmaskType }, + { "enumName", enumName }, + { "allFlags", allFlags } } ); } - if (!bitmaskAlias.empty()) + if ( !bitmaskAlias.empty() ) { - str += "\n" - " using " + stripPrefix(bitmaskAlias, "Vk") + " = " + bitmaskName + ";\n"; + str += + "\n" + " using " + + stripPrefix( bitmaskAlias, "Vk" ) + " = " + bitmaskName + ";\n"; } } -void VulkanHppGenerator::appendBitmaskToStringFunction(std::string & str, std::string const& bitmaskName, std::string const& enumName, std::vector const& enumValues) const +void VulkanHppGenerator::appendBitmaskToStringFunction( std::string & str, + std::string const & bitmaskName, + std::string const & enumName, + std::vector const & enumValues ) const { - str += "\n" - " VULKAN_HPP_INLINE std::string to_string( " + bitmaskName + (enumValues.empty() ? 
" " : " value ") + " )\n" + str += + "\n" + " VULKAN_HPP_INLINE std::string to_string( " + + bitmaskName + ( enumValues.empty() ? " " : " value " ) + + " )\n" " {\n"; - if (enumValues.empty()) + if ( enumValues.empty() ) { str += "\n return \"{}\";\n"; } else { // 'or' together all the bits in the value - str += "\n" + str += + "\n" " if ( !value ) return \"{}\";\n" " std::string result;\n"; - for (auto const& evd : enumValues) + for ( auto const & evd : enumValues ) { - if (evd.singleBit) + if ( evd.singleBit ) { - str += "\n" - " if ( value & " + enumName + "::" + evd.vkValue + " ) result += \"" + evd.vkValue.substr(1) + " | \";"; + str += + "\n" + " if ( value & " + + enumName + "::" + evd.vkValue + " ) result += \"" + evd.vkValue.substr( 1 ) + " | \";"; } } - str += "\n" + str += + "\n" " return \"{ \" + result.substr(0, result.size() - 3) + \" }\";\n"; } str += " }\n"; } -void VulkanHppGenerator::appendCall(std::string & str, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool twoStep, bool firstCall, bool singular) const +void VulkanHppGenerator::appendCall( std::string & str, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool twoStep, + bool firstCall, + bool singular ) const { // the original function call str += "d." + name + "( "; - assert(m_commandToHandle.find(name) != m_commandToHandle.end()); - std::string const& handle = m_commandToHandle.find(name)->second; - if (!handle.empty()) + assert( m_commandToHandle.find( name ) != m_commandToHandle.end() ); + std::string const & handle = m_commandToHandle.find( name )->second; + if ( !handle.empty() ) { // if it's member of a class -> the first argument is the member variable, starting with "m_" - assert(handle == commandData.params[0].type.type); - str += "m_" + startLowerCase(stripPrefix(handle, "Vk")); - if (1 < commandData.params.size()) + assert( handle == commandData.params[0].type.type ); + str += "m_" + startLowerCase( stripPrefix( handle, "Vk" ) ); + if ( 1 < commandData.params.size() ) { str += ", "; } } - appendArguments(str, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, firstCall, singular, handle.empty() ? 0 : 1, commandData.params.size()); + appendArguments( str, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + firstCall, + singular, + handle.empty() ? 
0 : 1, + commandData.params.size() ); str += " )"; } -void VulkanHppGenerator::appendCommand(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, bool definition) const +void VulkanHppGenerator::appendCommand( std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + bool definition ) const { - bool twoStep = isTwoStepAlgorithm(commandData.params); - std::map vectorParamIndices = determineVectorParamIndices(commandData.params); + bool twoStep = isTwoStepAlgorithm( commandData.params ); + std::map vectorParamIndices = determineVectorParamIndices( commandData.params ); - size_t returnParamIndex = determineReturnParamIndex(commandData, vectorParamIndices, twoStep); - bool isStructureChain = (returnParamIndex != INVALID_INDEX) && determineStructureChaining(commandData.params[returnParamIndex].type.type, m_extendedStructs, m_structureAliases); - std::string enhancedReturnType = determineEnhancedReturnType(commandData, returnParamIndex, vectorParamIndices, false); // get the enhanced return type without structureChain + size_t returnParamIndex = determineReturnParamIndex( commandData, vectorParamIndices, twoStep ); + bool isStructureChain = + ( returnParamIndex != INVALID_INDEX ) && + determineStructureChaining( commandData.params[returnParamIndex].type.type, m_extendedStructs, m_structureAliases ); + std::string enhancedReturnType = + determineEnhancedReturnType( commandData, + returnParamIndex, + vectorParamIndices, + false ); // get the enhanced return type without structureChain - size_t templateParamIndex = determineTemplateParamIndex(commandData.params, vectorParamIndices); + size_t templateParamIndex = determineTemplateParamIndex( commandData.params, vectorParamIndices ); // first create the standard version of the function std::string standard; - appendFunction(standard, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, enhancedReturnType, definition, false, false, false, false, false); + appendFunction( standard, + indentation, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + enhancedReturnType, + definition, + false, + false, + false, + false, + false ); // then the enhanced version, composed by up to eight parts std::string enhanced; - appendFunction(enhanced, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, enhancedReturnType, definition, true, false, false, false, false); + appendFunction( enhanced, + indentation, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + enhancedReturnType, + definition, + true, + false, + false, + false, + false ); - if (enhancedReturnType.find("Allocator") != std::string::npos) + if ( enhancedReturnType.find( "Allocator" ) != std::string::npos ) { - appendFunction(enhanced, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, enhancedReturnType, definition, true, false, false, false, true); + appendFunction( enhanced, + indentation, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + enhancedReturnType, + definition, + true, + false, + false, + false, + true ); } - if (isStructureChain) + if ( isStructureChain ) { - std::string enhancedReturnTypeWithStructureChain = determineEnhancedReturnType(commandData, returnParamIndex, vectorParamIndices, true); - 
appendFunction(enhanced, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, enhancedReturnTypeWithStructureChain, definition, true, false, false, true, false); + std::string enhancedReturnTypeWithStructureChain = + determineEnhancedReturnType( commandData, returnParamIndex, vectorParamIndices, true ); + appendFunction( enhanced, + indentation, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + enhancedReturnTypeWithStructureChain, + definition, + true, + false, + false, + true, + false ); - if (enhancedReturnTypeWithStructureChain.find("Allocator") != std::string::npos) + if ( enhancedReturnTypeWithStructureChain.find( "Allocator" ) != std::string::npos ) { - appendFunction(enhanced, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, enhancedReturnTypeWithStructureChain, definition, true, false, false, true, true); + appendFunction( enhanced, + indentation, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + enhancedReturnTypeWithStructureChain, + definition, + true, + false, + false, + true, + true ); } } // then a singular version, if a sized vector would be returned - std::map::const_iterator returnVector = vectorParamIndices.find(returnParamIndex); - bool singular = (returnVector != vectorParamIndices.end()) && - (returnVector->second != INVALID_INDEX) && - (commandData.params[returnVector->first].type.type != "void") && - (commandData.params[returnVector->second].type.postfix.empty() || (commandData.params[returnVector->second].type.postfix.back() != '*')); - if (singular) + std::map::const_iterator returnVector = vectorParamIndices.find( returnParamIndex ); + bool singular = ( returnVector != vectorParamIndices.end() ) && ( returnVector->second != INVALID_INDEX ) && + ( commandData.params[returnVector->first].type.type != "void" ) && + ( commandData.params[returnVector->second].type.postfix.empty() || + ( commandData.params[returnVector->second].type.postfix.back() != '*' ) ); + if ( singular ) { - appendFunction(enhanced, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, enhancedReturnType, definition, true, true, false, false, false); + appendFunction( enhanced, + indentation, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + enhancedReturnType, + definition, + true, + true, + false, + false, + false ); } // special handling for createDevice and createInstance ! 
- bool specialWriteUnique = (name == "vkCreateDevice") || (name == "vkCreateInstance"); + bool specialWriteUnique = ( name == "vkCreateDevice" ) || ( name == "vkCreateInstance" ); - // and then the same for the Unique* versions (a deleteCommand is available for the commandData's class, and the function starts with 'allocate' or 'create') - assert(m_commandToHandle.find(name) != m_commandToHandle.end()); - auto handleIt = m_handles.find(m_commandToHandle.find(name)->second); - assert(handleIt != m_handles.end()); - if ((!handleIt->second.deleteCommand.empty() || specialWriteUnique) - && ((name.substr(2, 8) == "Allocate") || (name.substr(2, 6) == "Create") - || ((name.substr(2, 8) == "Register") && (returnParamIndex + 1 == commandData.params.size())))) + // and then the same for the Unique* versions (a deleteCommand is available for the commandData's class, and the + // function starts with 'allocate' or 'create') + assert( m_commandToHandle.find( name ) != m_commandToHandle.end() ); + auto handleIt = m_handles.find( m_commandToHandle.find( name )->second ); + assert( handleIt != m_handles.end() ); + if ( ( !handleIt->second.deleteCommand.empty() || specialWriteUnique ) && + ( ( name.substr( 2, 8 ) == "Allocate" ) || ( name.substr( 2, 6 ) == "Create" ) || + ( ( name.substr( 2, 8 ) == "Register" ) && ( returnParamIndex + 1 == commandData.params.size() ) ) ) ) { enhanced += "#ifndef VULKAN_HPP_NO_SMART_HANDLE\n"; - appendFunction(enhanced, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, enhancedReturnType, definition, true, false, true, false, false); + appendFunction( enhanced, + indentation, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + enhancedReturnType, + definition, + true, + false, + true, + false, + false ); - if (enhancedReturnType.find("Allocator") != std::string::npos) + if ( enhancedReturnType.find( "Allocator" ) != std::string::npos ) { - appendFunction(enhanced, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, enhancedReturnType, definition, true, false, true, false, true); + appendFunction( enhanced, + indentation, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + enhancedReturnType, + definition, + true, + false, + true, + false, + true ); } - if (singular) + if ( singular ) { - appendFunction(enhanced, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, enhancedReturnType, definition, true, true, true, false, false); + appendFunction( enhanced, + indentation, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + enhancedReturnType, + definition, + true, + true, + true, + false, + false ); } enhanced += "#endif /*VULKAN_HPP_NO_SMART_HANDLE*/\n"; } // and append one or both of them - if (standard == enhanced) + if ( standard == enhanced ) { // standard and enhanced string are equal -> just use one of them and we're done str += standard; @@ -1083,22 +1386,21 @@ void VulkanHppGenerator::appendCommand(std::string & str, std::string const& ind // standard and enhanced string differ -> use both, wrapping the enhanced by !VULKAN_HPP_DISABLE_ENHANCED_MODE // determine the argument list of that standard, and compare it with that of the enhanced // if they are equal -> need to have just one; if they differ -> need to have both - size_t standardStart = standard.find('('); - size_t standardCount = 
standard.find(')', standardStart) - standardStart; - size_t enhancedStart = enhanced.find('('); - bool unchangedInterface = (standard.substr(standardStart, standardCount) == enhanced.substr(enhancedStart, standardCount)); - if (unchangedInterface) + size_t standardStart = standard.find( '(' ); + size_t standardCount = standard.find( ')', standardStart ) - standardStart; + size_t enhancedStart = enhanced.find( '(' ); + bool unchangedInterface = + ( standard.substr( standardStart, standardCount ) == enhanced.substr( enhancedStart, standardCount ) ); + if ( unchangedInterface ) { str += "#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE\n"; } - str += standard - + (unchangedInterface ? "#else" : "#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE") + "\n" - + enhanced - + "#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/\n"; + str += standard + ( unchangedInterface ? "#else" : "#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE" ) + "\n" + enhanced + + "#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/\n"; } } -void VulkanHppGenerator::appendDispatchLoaderDynamic(std::string & str) +void VulkanHppGenerator::appendDispatchLoaderDynamic( std::string & str ) { str += R"( #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL @@ -1192,18 +1494,18 @@ void VulkanHppGenerator::appendDispatchLoaderDynamic(std::string & str) public: )"; - for (auto const& handle : m_handles) + for ( auto const & handle : m_handles ) { - for (auto const& command : handle.second.commands) + for ( auto const & command : handle.second.commands ) { std::string enter, leave; - appendPlatformEnter(enter, !command.second.aliases.empty(), command.second.platform); - appendPlatformLeave(leave, !command.second.aliases.empty(), command.second.platform); + appendPlatformEnter( enter, !command.second.aliases.empty(), command.second.platform ); + appendPlatformLeave( leave, !command.second.aliases.empty(), command.second.platform ); str += enter + " PFN_" + command.first + " " + command.first + " = 0;\n" + leave; - for (auto const& alias : command.second.aliases) + for ( auto const & alias : command.second.aliases ) { - assert(enter.empty() && leave.empty()); + assert( enter.empty() && leave.empty() ); str += " PFN_" + alias + " " + alias + " = 0;\n"; } } @@ -1213,53 +1515,60 @@ void VulkanHppGenerator::appendDispatchLoaderDynamic(std::string & str) std::string strDeviceFunctions; std::string strDeviceFunctionsInstance; std::string strInstanceFunctions; - for (auto const& handle : m_handles) + for ( auto const & handle : m_handles ) { - for (auto const& command : handle.second.commands) + for ( auto const & command : handle.second.commands ) { - if ((command.first != "vkGetInstanceProcAddr")) + if ( ( command.first != "vkGetInstanceProcAddr" ) ) { std::string enter, leave; - appendPlatformEnter(enter, !command.second.aliases.empty(), command.second.platform); - appendPlatformLeave(leave, !command.second.aliases.empty(), command.second.platform); + appendPlatformEnter( enter, !command.second.aliases.empty(), command.second.platform ); + appendPlatformLeave( leave, !command.second.aliases.empty(), command.second.platform ); - if (handle.first.empty()) + if ( handle.first.empty() ) { - assert(command.second.aliases.empty()); + assert( command.second.aliases.empty() ); emptyFunctions += enter; - emptyFunctions += " " + command.first + " = PFN_" + command.first + "( vkGetInstanceProcAddr( NULL, \"" + command.first + "\" ) );\n"; + emptyFunctions += " " + command.first + " = PFN_" + command.first + "( vkGetInstanceProcAddr( NULL, \"" + + command.first + "\" ) );\n"; emptyFunctions += leave; 
} - else if (!command.second.params.empty() - && m_handles.find(command.second.params[0].type.type) != m_handles.end() - && command.second.params[0].type.type != "VkInstance" - && command.second.params[0].type.type != "VkPhysicalDevice") + else if ( !command.second.params.empty() && + m_handles.find( command.second.params[0].type.type ) != m_handles.end() && + command.second.params[0].type.type != "VkInstance" && + command.second.params[0].type.type != "VkPhysicalDevice" ) { strDeviceFunctions += enter; - strDeviceFunctions += " " + command.first + " = PFN_" + command.first + "( vkGetDeviceProcAddr( device, \"" + command.first + "\" ) );\n"; + strDeviceFunctions += " " + command.first + " = PFN_" + command.first + + "( vkGetDeviceProcAddr( device, \"" + command.first + "\" ) );\n"; strDeviceFunctions += leave; strDeviceFunctionsInstance += enter; - strDeviceFunctionsInstance += " " + command.first + " = PFN_" + command.first + "( vkGetInstanceProcAddr( instance, \"" + command.first + "\" ) );\n"; + strDeviceFunctionsInstance += " " + command.first + " = PFN_" + command.first + + "( vkGetInstanceProcAddr( instance, \"" + command.first + "\" ) );\n"; strDeviceFunctionsInstance += leave; - for (auto const& alias : command.second.aliases) + for ( auto const & alias : command.second.aliases ) { - assert(enter.empty() && leave.empty()); - strDeviceFunctions += " " + alias + " = PFN_" + alias + "( vkGetDeviceProcAddr( device, \"" + alias + "\" ) );\n"; - strDeviceFunctionsInstance += " " + alias + " = PFN_" + alias + "( vkGetInstanceProcAddr( instance, \"" + alias + "\" ) );\n"; + assert( enter.empty() && leave.empty() ); + strDeviceFunctions += + " " + alias + " = PFN_" + alias + "( vkGetDeviceProcAddr( device, \"" + alias + "\" ) );\n"; + strDeviceFunctionsInstance += + " " + alias + " = PFN_" + alias + "( vkGetInstanceProcAddr( instance, \"" + alias + "\" ) );\n"; } } else { strInstanceFunctions += enter; - strInstanceFunctions += " " + command.first + " = PFN_" + command.first + "( vkGetInstanceProcAddr( instance, \"" + command.first + "\" ) );\n"; + strInstanceFunctions += " " + command.first + " = PFN_" + command.first + + "( vkGetInstanceProcAddr( instance, \"" + command.first + "\" ) );\n"; strInstanceFunctions += leave; - for (auto const& alias : command.second.aliases) + for ( auto const & alias : command.second.aliases ) { - assert(enter.empty() && leave.empty()); - strInstanceFunctions += " " + alias + " = PFN_" + alias + "( vkGetInstanceProcAddr( instance, \"" + alias + "\" ) );\n"; + assert( enter.empty() && leave.empty() ); + strInstanceFunctions += + " " + alias + " = PFN_" + alias + "( vkGetInstanceProcAddr( instance, \"" + alias + "\" ) );\n"; } } } @@ -1344,7 +1653,7 @@ void VulkanHppGenerator::appendDispatchLoaderDynamic(std::string & str) )"; } -void VulkanHppGenerator::appendDispatchLoaderStatic(std::string & str) +void VulkanHppGenerator::appendDispatchLoaderStatic( std::string & str ) { str += R"( #if !defined(VK_NO_PROTOTYPES) @@ -1352,39 +1661,48 @@ void VulkanHppGenerator::appendDispatchLoaderStatic(std::string & str) { public:)"; - for (auto const& handle : m_handles) + for ( auto const & handle : m_handles ) { - for (auto const& command : handle.second.commands) + for ( auto const & command : handle.second.commands ) { std::string parameterList, parameters; - bool firstParam = true; - for (auto param : command.second.params) + bool firstParam = true; + for ( auto param : command.second.params ) { - if (!firstParam) + if ( !firstParam ) { parameterList += ", "; 
parameters += ", "; } - parameterList += param.type.prefix + (param.type.prefix.empty() ? "" : " ") + param.type.type + param.type.postfix + " " + param.name + constructCArraySizes(param.arraySizes); + parameterList += param.type.prefix + ( param.type.prefix.empty() ? "" : " " ) + param.type.type + + param.type.postfix + " " + param.name + constructCArraySizes( param.arraySizes ); parameters += param.name; firstParam = false; } - std::string commandName = stripPrefix(command.first, "vk"); + std::string commandName = stripPrefix( command.first, "vk" ); str += "\n"; - appendPlatformEnter(str, !command.second.aliases.empty(), command.second.platform); - str += " " + command.second.returnType + " vk" + commandName + "( " + parameterList + " ) const VULKAN_HPP_NOEXCEPT\n" - " {\n" - " return ::vk" + commandName + "( " + parameters + " );\n" - " }\n"; - appendPlatformLeave(str, !command.second.aliases.empty(), command.second.platform); - for (auto const& alias : command.second.aliases) + appendPlatformEnter( str, !command.second.aliases.empty(), command.second.platform ); + str += " " + command.second.returnType + " vk" + commandName + "( " + parameterList + + " ) const VULKAN_HPP_NOEXCEPT\n" + " {\n" + " return ::vk" + + commandName + "( " + parameters + + " );\n" + " }\n"; + appendPlatformLeave( str, !command.second.aliases.empty(), command.second.platform ); + for ( auto const & alias : command.second.aliases ) { - commandName = stripPrefix(alias, "vk"); - str += "\n" - " " + command.second.returnType + " vk" + commandName + "( " + parameterList + " ) const VULKAN_HPP_NOEXCEPT\n" + commandName = stripPrefix( alias, "vk" ); + str += + "\n" + " " + + command.second.returnType + " vk" + commandName + "( " + parameterList + + " ) const VULKAN_HPP_NOEXCEPT\n" " {\n" - " return ::vk" + commandName + "( " + parameters + " );\n" + " return ::vk" + + commandName + "( " + parameters + + " );\n" " }\n"; } } @@ -1392,9 +1710,10 @@ void VulkanHppGenerator::appendDispatchLoaderStatic(std::string & str) str += " };\n#endif\n"; } -void VulkanHppGenerator::appendDispatchLoaderDefault(std::string & str) +void VulkanHppGenerator::appendDispatchLoaderDefault( std::string & str ) { - str += "\n" + str += + "\n" R"( class DispatchLoaderDynamic; #if !defined(VULKAN_HPP_DISPATCH_LOADER_DYNAMIC) # if defined(VK_NO_PROTOTYPES) @@ -1425,34 +1744,40 @@ void VulkanHppGenerator::appendDispatchLoaderDefault(std::string & str) )"; } -void VulkanHppGenerator::appendEnum(std::string & str, std::pair const& enumData) const +void VulkanHppGenerator::appendEnum( std::string & str, std::pair const & enumData ) const { - str += " enum class " + stripPrefix(enumData.first, "Vk"); - if (enumData.second.isBitmask) + str += " enum class " + stripPrefix( enumData.first, "Vk" ); + if ( enumData.second.isBitmask ) { - auto bitmaskIt = std::find_if(m_bitmasks.begin(), m_bitmasks.end(), [&enumData](auto const& bitmask){ return bitmask.second.requirements == enumData.first; }); - assert(bitmaskIt != m_bitmasks.end()); + auto bitmaskIt = std::find_if( m_bitmasks.begin(), m_bitmasks.end(), [&enumData]( auto const & bitmask ) { + return bitmask.second.requirements == enumData.first; + } ); + assert( bitmaskIt != m_bitmasks.end() ); str += " : " + bitmaskIt->first; } - str += "\n" + str += + "\n" " {"; bool first = true; - for (auto const& value : enumData.second.values) + for ( auto const & value : enumData.second.values ) { - if (!first) + if ( !first ) { str += ","; } str += "\n " + value.vkValue + " = " + value.vulkanValue; first = false; } 
- for (auto const& value : enumData.second.aliases) + for ( auto const & value : enumData.second.aliases ) { // make sure to only list alias values that differ from all non-alias values - if (std::find_if(enumData.second.values.begin(), enumData.second.values.end(), [&value](EnumValueData const& evd) { return value.second == evd.vkValue; }) == enumData.second.values.end()) + if ( std::find_if( + enumData.second.values.begin(), enumData.second.values.end(), [&value]( EnumValueData const & evd ) { + return value.second == evd.vkValue; + } ) == enumData.second.values.end() ) { - if (!first) + if ( !first ) { str += ","; } @@ -1460,51 +1785,56 @@ void VulkanHppGenerator::appendEnum(std::string & str, std::pair const& arraySizes, std::vector const& values, bool argument) const +void VulkanHppGenerator::appendEnumInitializer( std::string & str, + TypeData const & type, + std::vector const & arraySizes, + std::vector const & values, + bool argument ) const { // enum arguments might need special initialization - assert(type.prefix.empty() && !values.empty()); - std::string value = "VULKAN_HPP_NAMESPACE::" + stripPrefix(type.type, "Vk") + "::" + values.front().vkValue; - if (arraySizes.empty()) + assert( type.prefix.empty() && !values.empty() ); + std::string value = "VULKAN_HPP_NAMESPACE::" + stripPrefix( type.type, "Vk" ) + "::" + values.front().vkValue; + if ( arraySizes.empty() ) { str += value; } else { - assert(arraySizes.size() == 1); - int count = std::stoi(arraySizes[0]); - assert(1 < count); - str += argument ? "{ { " : "{ "; // for function arguments, we need two braces, for default initializers just one + assert( arraySizes.size() == 1 ); + int count = std::stoi( arraySizes[0] ); + assert( 1 < count ); + str += argument ? "{ { " : "{ "; // for function arguments, we need two braces, for default initializers just one str += value; - for (int i = 1; i < count; i++) + for ( int i = 1; i < count; i++ ) { str += ", " + value; } @@ -1512,59 +1842,68 @@ void VulkanHppGenerator::appendEnumInitializer(std::string& str, TypeData const& } } -void VulkanHppGenerator::appendEnumToString(std::string & str, std::pair const& enumData) const +void VulkanHppGenerator::appendEnumToString( std::string & str, + std::pair const & enumData ) const { - std::string enumName = stripPrefix(enumData.first, "Vk"); + std::string enumName = stripPrefix( enumData.first, "Vk" ); - str += "\n" - " VULKAN_HPP_INLINE std::string to_string( " + enumName + (enumData.second.values.empty() ? "" : " value") + " )\n" + str += + "\n" + " VULKAN_HPP_INLINE std::string to_string( " + + enumName + ( enumData.second.values.empty() ? 
"" : " value" ) + + " )\n" " {"; - if (enumData.second.values.empty()) + if ( enumData.second.values.empty() ) { - str += "\n" + str += + "\n" " return \"(void)\";\n"; } else { - str += "\n" + str += + "\n" " switch ( value )\n" " {\n"; - for (auto const& value : enumData.second.values) + for ( auto const & value : enumData.second.values ) { - str += " case " + enumName + "::" + value.vkValue + " : return \"" + value.vkValue.substr(1) + "\";\n"; + str += " case " + enumName + "::" + value.vkValue + " : return \"" + value.vkValue.substr( 1 ) + "\";\n"; } - str += " default: return \"invalid\";\n" + str += + " default: return \"invalid\";\n" " }\n"; } str += " }\n"; } -void VulkanHppGenerator::appendForwardDeclarations(std::string & str) const +void VulkanHppGenerator::appendForwardDeclarations( std::string & str ) const { str += "\n"; - for (auto const& structure : m_structures) + for ( auto const & structure : m_structures ) { - appendPlatformEnter(str, !structure.second.aliases.empty(), structure.second.platform); - str += std::string(" ") + (structure.second.isUnion ? "union" : "struct") + " " + stripPrefix(structure.first, "Vk") + ";\n"; - for (std::string const& alias : structure.second.aliases) + appendPlatformEnter( str, !structure.second.aliases.empty(), structure.second.platform ); + str += std::string( " " ) + ( structure.second.isUnion ? "union" : "struct" ) + " " + + stripPrefix( structure.first, "Vk" ) + ";\n"; + for ( std::string const & alias : structure.second.aliases ) { - str += " using " + stripPrefix(alias, "Vk") + " = " + stripPrefix(structure.first, "Vk") + ";\n"; + str += " using " + stripPrefix( alias, "Vk" ) + " = " + stripPrefix( structure.first, "Vk" ) + ";\n"; } - appendPlatformLeave(str, !structure.second.aliases.empty(), structure.second.platform); + appendPlatformLeave( str, !structure.second.aliases.empty(), structure.second.platform ); } } -bool needsMultiVectorSizeCheck(size_t returnParamIndex, std::map const& vectorParamIndices) +bool needsMultiVectorSizeCheck( size_t returnParamIndex, std::map const & vectorParamIndices ) { - for (std::map::const_iterator it0 = vectorParamIndices.begin(); it0 != vectorParamIndices.end(); ++it0) + for ( std::map::const_iterator it0 = vectorParamIndices.begin(); it0 != vectorParamIndices.end(); + ++it0 ) { - if (it0->first != returnParamIndex) + if ( it0->first != returnParamIndex ) { - for (std::map::const_iterator it1 = std::next(it0); it1 != vectorParamIndices.end(); ++it1) + for ( std::map::const_iterator it1 = std::next( it0 ); it1 != vectorParamIndices.end(); ++it1 ) { - if ((it1->first != returnParamIndex) && (it0->second == it1->second)) + if ( ( it1->first != returnParamIndex ) && ( it0->second == it1->second ) ) { return true; } @@ -1574,94 +1913,193 @@ bool needsMultiVectorSizeCheck(size_t returnParamIndex, std::map return false; } -void VulkanHppGenerator::appendFunction(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool twoStep, std::string const& enhancedReturnType, bool definition, bool enhanced, bool singular, bool unique, bool isStructureChain, bool withAllocator) const +void VulkanHppGenerator::appendFunction( std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool twoStep, + std::string const & 
enhancedReturnType, + bool definition, + bool enhanced, + bool singular, + bool unique, + bool isStructureChain, + bool withAllocator ) const { - appendFunctionHeaderTemplate(str, indentation, returnParamIndex, templateParamIndex, enhancedReturnType, enhanced, singular, unique, !definition, isStructureChain); + appendFunctionHeaderTemplate( str, + indentation, + returnParamIndex, + templateParamIndex, + enhancedReturnType, + enhanced, + singular, + unique, + !definition, + isStructureChain ); - str += indentation + (definition ? "VULKAN_HPP_INLINE " : ""); + str += indentation + ( definition ? "VULKAN_HPP_INLINE " : "" ); - appendFunctionHeaderReturnType(str, commandData, returnParamIndex, vectorParamIndices, enhancedReturnType, enhanced, twoStep, singular, unique, isStructureChain); + appendFunctionHeaderReturnType( str, + commandData, + returnParamIndex, + vectorParamIndices, + enhancedReturnType, + enhanced, + twoStep, + singular, + unique, + isStructureChain ); - assert(m_commandToHandle.find(name) != m_commandToHandle.end()); - std::string const& handle = m_commandToHandle.find(name)->second; - if (definition && !handle.empty()) + assert( m_commandToHandle.find( name ) != m_commandToHandle.end() ); + std::string const & handle = m_commandToHandle.find( name )->second; + if ( definition && !handle.empty() ) { - str += stripPrefix(handle, "Vk") + "::"; + str += stripPrefix( handle, "Vk" ) + "::"; } // append the function header name - std::string commandName = determineCommandName(name, commandData.params[0].type.type); - str += (singular ? stripPluralS(commandName) : commandName); - if (unique) + std::string commandName = determineCommandName( name, commandData.params[0].type.type ); + str += ( singular ? stripPluralS( commandName ) : commandName ); + if ( unique ) { str += "Unique"; } - appendFunctionHeaderArguments(str, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, enhanced, singular, !definition, withAllocator); + appendFunctionHeaderArguments( str, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + enhanced, + singular, + !definition, + withAllocator ); // Any function that originally does not return VkResult can be marked noexcept, // if it is enhanced it must not include anything with an Allocator or needs size checks on multiple vectors - bool hasAllocator = enhancedReturnType.find("Allocator") != std::string::npos; - if (!enhanced || (commandData.returnType != "VkResult" && !(enhanced && (hasAllocator || needsMultiVectorSizeCheck(returnParamIndex, vectorParamIndices))))) + bool hasAllocator = enhancedReturnType.find( "Allocator" ) != std::string::npos; + if ( !enhanced || + ( commandData.returnType != "VkResult" && + !( enhanced && ( hasAllocator || needsMultiVectorSizeCheck( returnParamIndex, vectorParamIndices ) ) ) ) ) { str += " VULKAN_HPP_NOEXCEPT"; } - str += std::string(definition ? "" : ";") + "\n"; + str += std::string( definition ? 
"" : ";" ) + "\n"; - if (definition) + if ( definition ) { // append the function body str += indentation + "{\n"; - if (enhanced) + if ( enhanced ) { - appendFunctionBodyEnhanced(str, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, enhancedReturnType, singular, unique, isStructureChain, withAllocator); + appendFunctionBodyEnhanced( str, + indentation, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + enhancedReturnType, + singular, + unique, + isStructureChain, + withAllocator ); } else { - appendFunctionBodyStandard(str, indentation, name, commandData); + appendFunctionBodyStandard( str, indentation, name, commandData ); } str += indentation + "}\n"; } } -void VulkanHppGenerator::appendFunctionBodyEnhanced(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool twoStep, std::string const& enhancedReturnType, bool singular, bool unique, bool isStructureChain, bool withAllocator) const +void VulkanHppGenerator::appendFunctionBodyEnhanced( std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool twoStep, + std::string const & enhancedReturnType, + bool singular, + bool unique, + bool isStructureChain, + bool withAllocator ) const { - if (unique && !singular && (vectorParamIndices.find(returnParamIndex) != vectorParamIndices.end())) // returns a vector of UniqueStuff + if ( unique && !singular && + ( vectorParamIndices.find( returnParamIndex ) != vectorParamIndices.end() ) ) // returns a vector of UniqueStuff { - appendFunctionBodyEnhancedVectorOfUniqueHandles(str, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, singular, withAllocator); + appendFunctionBodyEnhancedVectorOfUniqueHandles( str, + indentation, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + singular, + withAllocator ); } - else if (isStructureChain && (vectorParamIndices.find(returnParamIndex) != vectorParamIndices.end())) + else if ( isStructureChain && ( vectorParamIndices.find( returnParamIndex ) != vectorParamIndices.end() ) ) { - appendFunctionBodyEnhancedVectorOfStructureChain(str, indentation, name, commandData, returnParamIndex, vectorParamIndices, withAllocator); + appendFunctionBodyEnhancedVectorOfStructureChain( + str, indentation, name, commandData, returnParamIndex, vectorParamIndices, withAllocator ); } else { - if (1 < vectorParamIndices.size()) + if ( 1 < vectorParamIndices.size() ) { - appendFunctionBodyEnhancedMultiVectorSizeCheck(str, indentation, name, commandData, returnParamIndex, vectorParamIndices); + appendFunctionBodyEnhancedMultiVectorSizeCheck( + str, indentation, name, commandData, returnParamIndex, vectorParamIndices ); } std::string returnName; - if (returnParamIndex != INVALID_INDEX) + if ( returnParamIndex != INVALID_INDEX ) { - returnName = appendFunctionBodyEnhancedLocalReturnVariable(str, indentation, commandData, returnParamIndex, vectorParamIndices, twoStep, enhancedReturnType, singular, isStructureChain, withAllocator); + returnName = appendFunctionBodyEnhancedLocalReturnVariable( str, + indentation, + commandData, + returnParamIndex, + vectorParamIndices, + twoStep, + enhancedReturnType, 
+ singular, + isStructureChain, + withAllocator ); } - if (twoStep) + if ( twoStep ) { - appendFunctionBodyEnhancedTwoStep(str, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, singular, returnName); + appendFunctionBodyEnhancedTwoStep( str, + indentation, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + singular, + returnName ); } else { - appendFunctionBodyEnhancedSingleStep(str, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, singular); + appendFunctionBodyEnhancedSingleStep( + str, indentation, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, singular ); } - if ((commandData.returnType == "VkResult") || !commandData.successCodes.empty()) + if ( ( commandData.returnType == "VkResult" ) || !commandData.successCodes.empty() ) { - appendFunctionBodyEnhancedReturnResultValue(str, indentation, returnName, name, commandData, returnParamIndex, twoStep, singular, unique); + appendFunctionBodyEnhancedReturnResultValue( + str, indentation, returnName, name, commandData, returnParamIndex, twoStep, singular, unique ); } - else if ((returnParamIndex != INVALID_INDEX) && (stripPrefix(commandData.returnType, "Vk") != enhancedReturnType)) + else if ( ( returnParamIndex != INVALID_INDEX ) && + ( stripPrefix( commandData.returnType, "Vk" ) != enhancedReturnType ) ) { // for the other returning cases, when the return type is somhow enhanced, just return the local returnVariable str += indentation + " return " + returnName + ";\n"; @@ -1669,28 +2107,39 @@ void VulkanHppGenerator::appendFunctionBodyEnhanced(std::string & str, std::stri } } -std::string VulkanHppGenerator::appendFunctionBodyEnhancedLocalReturnVariable(std::string & str, std::string const& indentation, CommandData const& commandData, size_t returnParamIndex, std::map const& vectorParamIndices, bool twoStep, std::string const& enhancedReturnType, bool singular, bool isStructureChain, bool withAllocator) const +std::string VulkanHppGenerator::appendFunctionBodyEnhancedLocalReturnVariable( + std::string & str, + std::string const & indentation, + CommandData const & commandData, + size_t returnParamIndex, + std::map const & vectorParamIndices, + bool twoStep, + std::string const & enhancedReturnType, + bool singular, + bool isStructureChain, + bool withAllocator ) const { - std::string pureReturnType = stripPrefix(commandData.params[returnParamIndex].type.type, "Vk"); - std::string returnName = startLowerCase(stripPrefix(commandData.params[returnParamIndex].name, "p")); + std::string pureReturnType = stripPrefix( commandData.params[returnParamIndex].type.type, "Vk" ); + std::string returnName = startLowerCase( stripPrefix( commandData.params[returnParamIndex].name, "p" ) ); // there is a returned parameter -> we need a local variable to hold that value - if (stripPrefix(commandData.returnType, "Vk") != enhancedReturnType) + if ( stripPrefix( commandData.returnType, "Vk" ) != enhancedReturnType ) { // the returned parameter is somehow enhanced by us str += indentation + " "; - if (singular) + if ( singular ) { - returnName = appendFunctionBodyEnhancedLocalReturnVariableSingular(str, indentation, returnName, pureReturnType, isStructureChain); + returnName = appendFunctionBodyEnhancedLocalReturnVariableSingular( + str, indentation, returnName, pureReturnType, isStructureChain ); } else { // in non-singular case, use the enhanced type for the return variable (like vector<...>) - if (isStructureChain 
&& vectorParamIndices.empty())
+      if ( isStructureChain && vectorParamIndices.empty() )
       {
         // For StructureChains use the template parameters
-        str += "StructureChain<X, Y, Z...> structureChain;\n"
-          + indentation + "  " + enhancedReturnType + "& " + returnName + " = structureChain.template get<" + enhancedReturnType + ">()";
+        str += "StructureChain<X, Y, Z...> structureChain;\n" + indentation + "  " + enhancedReturnType + "& " +
+               returnName + " = structureChain.template get<" + enhancedReturnType + ">()";
         returnName = "structureChain";
       }
       else
@@ -1698,12 +2147,13 @@ std::string VulkanHppGenerator::appendFunctionBodyEnhancedLocalReturnVariable(st
         str += enhancedReturnType + " " + returnName;
       }
 
-      std::map<size_t, size_t>::const_iterator vpiIt = vectorParamIndices.find(returnParamIndex);
-      if (vpiIt != vectorParamIndices.end() && !twoStep)
+      std::map<size_t, size_t>::const_iterator vpiIt = vectorParamIndices.find( returnParamIndex );
+      if ( vpiIt != vectorParamIndices.end() && !twoStep )
       {
-        appendFunctionBodyEnhancedLocalReturnVariableVectorSize(str, commandData.params, *vpiIt, returnParamIndex, vectorParamIndices, withAllocator);
+        appendFunctionBodyEnhancedLocalReturnVariableVectorSize(
+          str, commandData.params, *vpiIt, returnParamIndex, vectorParamIndices, withAllocator );
       }
-      else if (withAllocator)
+      else if ( withAllocator )
       {
         str += "( vectorAllocator )";
       }
@@ -1712,57 +2162,71 @@ std::string VulkanHppGenerator::appendFunctionBodyEnhancedLocalReturnVariable(st
   }
   else
   {
-    // the return parameter is not enhanced -> the type is supposed to be a Result and there are more than one success codes!
-    assert((commandData.returnType == "VkResult") && (1 < commandData.successCodes.size()));
+    // the return parameter is not enhanced -> the type is supposed to be a Result and there are more than one success
+    // codes!
+    assert( ( commandData.returnType == "VkResult" ) && ( 1 < commandData.successCodes.size() ) );
     str += indentation + "  " + pureReturnType + " " + returnName + ";\n";
   }
   return returnName;
 }
 
-void VulkanHppGenerator::appendFunctionBodyEnhancedLocalReturnVariableVectorSize(std::string & str, std::vector<ParamData> const& params, std::pair<size_t, size_t> const& vectorParamIndex, size_t returnParamIndex, std::map<size_t, size_t> const& vectorParamIndices, bool withAllocator) const
+void VulkanHppGenerator::appendFunctionBodyEnhancedLocalReturnVariableVectorSize(
+  std::string &                     str,
+  std::vector<ParamData> const &    params,
+  std::pair<size_t, size_t> const & vectorParamIndex,
+  size_t                            returnParamIndex,
+  std::map<size_t, size_t> const &  vectorParamIndices,
+  bool                              withAllocator ) const
 {
   // if the return parameter is a vector parameter, and not part of a two-step algorithm, initialize its size
   std::string size;
-  if (vectorParamIndex.second == INVALID_INDEX)
+  if ( vectorParamIndex.second == INVALID_INDEX )
   {
-    assert(!params[returnParamIndex].len.empty());
-    // the size of the vector is not given by an other parameter, but by some member of a parameter, described as 'parameter->member'
+    assert( !params[returnParamIndex].len.empty() );
+    // the size of the vector is not given by an other parameter, but by some member of a parameter, described as
+    // 'parameter->member'
     // -> replace the '->' by '.' and filter out the leading 'p' to access that value
-    size = startLowerCase(stripPrefix(params[returnParamIndex].len, "p"));
-    size_t pos = size.find("->");
+    size = startLowerCase( stripPrefix( params[returnParamIndex].len, "p" ) );
+    size_t pos = size.find( "->" );
     // older versions of the vk.xml used the notation parameter::member !
- if (pos == std::string::npos) + if ( pos == std::string::npos ) { - pos = size.find("::"); + pos = size.find( "::" ); } - assert(pos != std::string::npos); - size.replace(pos, 2, "."); + assert( pos != std::string::npos ); + size.replace( pos, 2, "." ); } else { // the size of the vector is given by an other parameter // first check, if that size has become the size of some other vector parameter // -> look for it and get it's actual size - for (auto const& vpi : vectorParamIndices) + for ( auto const & vpi : vectorParamIndices ) { - if ((vpi.first != vectorParamIndex.first) && (vpi.second == vectorParamIndex.second)) + if ( ( vpi.first != vectorParamIndex.first ) && ( vpi.second == vectorParamIndex.second ) ) { - size = startLowerCase(stripPrefix(params[vpi.first].name, "p")) + ".size()"; + size = startLowerCase( stripPrefix( params[vpi.first].name, "p" ) ) + ".size()"; break; } } - if (size.empty()) + if ( size.empty() ) { // otherwise, just use that parameter size = params[vectorParamIndex.second].name; } } - assert(!size.empty()); - str += "( " + size + (withAllocator ? ", vectorAllocator" : "") + " )"; + assert( !size.empty() ); + str += "( " + size + ( withAllocator ? ", vectorAllocator" : "" ) + " )"; } -void VulkanHppGenerator::appendFunctionBodyEnhancedMultiVectorSizeCheck(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, std::map const& vectorParamIndices) const +void VulkanHppGenerator::appendFunctionBodyEnhancedMultiVectorSizeCheck( + std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + std::map const & vectorParamIndices ) const { std::string const sizeCheckTemplate = R"#(#ifdef VULKAN_HPP_NO_EXCEPTIONS @@ -1775,56 +2239,70 @@ ${i} } #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ )#"; - assert(m_commandToHandle.find(name) != m_commandToHandle.end()); - std::string const& handle = m_commandToHandle.find(name)->second; + assert( m_commandToHandle.find( name ) != m_commandToHandle.end() ); + std::string const & handle = m_commandToHandle.find( name )->second; // add some error checks if multiple vectors need to have the same size - std::string commandName = determineCommandName(name, commandData.params[0].type.type); - for (std::map::const_iterator it0 = vectorParamIndices.begin(); it0 != vectorParamIndices.end(); ++it0) + std::string commandName = determineCommandName( name, commandData.params[0].type.type ); + for ( std::map::const_iterator it0 = vectorParamIndices.begin(); it0 != vectorParamIndices.end(); + ++it0 ) { - if (it0->first != returnParamIndex) + if ( it0->first != returnParamIndex ) { - for (std::map::const_iterator it1 = std::next(it0); it1 != vectorParamIndices.end(); ++it1) + for ( std::map::const_iterator it1 = std::next( it0 ); it1 != vectorParamIndices.end(); ++it1 ) { - if ((it1->first != returnParamIndex) && (it0->second == it1->second)) + if ( ( it1->first != returnParamIndex ) && ( it0->second == it1->second ) ) { - str += replaceWithMap(sizeCheckTemplate, std::map({ - { "firstVectorName", startLowerCase(stripPrefix(commandData.params[it0->first].name, "p")) }, - { "secondVectorName", startLowerCase(stripPrefix(commandData.params[it1->first].name, "p")) }, - { "className", handle }, - { "commandName", commandName }, - { "i", indentation } - })); + str += replaceWithMap( + sizeCheckTemplate, + std::map( + { { "firstVectorName", startLowerCase( stripPrefix( commandData.params[it0->first].name, 
"p" ) ) }, + { "secondVectorName", startLowerCase( stripPrefix( commandData.params[it1->first].name, "p" ) ) }, + { "className", handle }, + { "commandName", commandName }, + { "i", indentation } } ) ); } } } } } -void VulkanHppGenerator::appendFunctionBodyEnhancedReturnResultValue(std::string & str, std::string const& indentation, std::string const& returnName, std::string const& name, CommandData const& commandData, size_t returnParamIndex, bool twoStep, bool singular, bool unique) const +void VulkanHppGenerator::appendFunctionBodyEnhancedReturnResultValue( std::string & str, + std::string const & indentation, + std::string const & returnName, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + bool twoStep, + bool singular, + bool unique ) const { - std::string type = (returnParamIndex != INVALID_INDEX) ? commandData.params[returnParamIndex].type.type : ""; - std::string returnVectorName = (returnParamIndex != INVALID_INDEX) ? stripPostfix(stripPrefix(commandData.params[returnParamIndex].name, "p"), "s") : ""; - std::string commandName = determineCommandName(name, commandData.params[0].type.type); + std::string type = ( returnParamIndex != INVALID_INDEX ) ? commandData.params[returnParamIndex].type.type : ""; + std::string returnVectorName = ( returnParamIndex != INVALID_INDEX ) + ? stripPostfix( stripPrefix( commandData.params[returnParamIndex].name, "p" ), "s" ) + : ""; + std::string commandName = determineCommandName( name, commandData.params[0].type.type ); - if (commandData.returnType == "void") + if ( commandData.returnType == "void" ) { - std::cerr << "warning: skipping appendFunctionBodyEnhancedReturnResultValue for function " << commandName << " because the returnType is void"; + std::cerr << "warning: skipping appendFunctionBodyEnhancedReturnResultValue for function " << commandName + << " because the returnType is void"; return; } - assert(m_commandToHandle.find(name) != m_commandToHandle.end()); - std::string const& handle = m_commandToHandle.find(name)->second; + assert( m_commandToHandle.find( name ) != m_commandToHandle.end() ); + std::string const & handle = m_commandToHandle.find( name )->second; - if (unique) + if ( unique ) { // the unique version needs a Deleter object for destruction of the newly created stuff // get the DeleterData corresponding to the returned type // special handling for "createDevice", as Device is created from PhysicalDevice, but destroyed on its own - bool noParent = handle.empty() || (name == "vkCreateDevice"); - str += "\n" - + indentation + ((name == "vkAllocateMemory") ? " ObjectFree<" : " ObjectDestroy<") + (noParent ? "NoParent" : stripPrefix(handle, "Vk")) + ",Dispatch> deleter( " + (noParent ? "" : "*this, ") + "allocator, d );\n" - + indentation + " return createResultValue<" + stripPrefix(type, "Vk") + ",Dispatch>( result, "; + bool noParent = handle.empty() || ( name == "vkCreateDevice" ); + str += "\n" + indentation + ( ( name == "vkAllocateMemory" ) ? " ObjectFree<" : " ObjectDestroy<" ) + + ( noParent ? "NoParent" : stripPrefix( handle, "Vk" ) ) + ",Dispatch> deleter( " + + ( noParent ? 
"" : "*this, " ) + "allocator, d );\n" + indentation + " return createResultValue<" + + stripPrefix( type, "Vk" ) + ",Dispatch>( result, "; } else { @@ -1832,27 +2310,28 @@ void VulkanHppGenerator::appendFunctionBodyEnhancedReturnResultValue(std::string } // if the return type is "Result" or there is at least one success code, create the Result/Value construct to return - if (returnParamIndex != INVALID_INDEX) + if ( returnParamIndex != INVALID_INDEX ) { // if there's a return parameter, list it in the Result/Value constructor str += returnName + ", "; } // now the function name (with full namespace) as a string - str += "VULKAN_HPP_NAMESPACE_STRING\"::" + (handle.empty() ? "" : stripPrefix(handle, "Vk") + "::") + (singular ? stripPluralS(commandName) : commandName) + (unique ? "Unique" : "") + "\""; + str += "VULKAN_HPP_NAMESPACE_STRING\"::" + ( handle.empty() ? "" : stripPrefix( handle, "Vk" ) + "::" ) + + ( singular ? stripPluralS( commandName ) : commandName ) + ( unique ? "Unique" : "" ) + "\""; - if (!twoStep && (1 < commandData.successCodes.size())) + if ( !twoStep && ( 1 < commandData.successCodes.size() ) ) { // and for the single-step algorithms with more than one success code list them all - str += ", { Result::" + createSuccessCode(commandData.successCodes[0], m_tags); - for (size_t i = 1; i < commandData.successCodes.size(); i++) + str += ", { Result::" + createSuccessCode( commandData.successCodes[0], m_tags ); + for ( size_t i = 1; i < commandData.successCodes.size(); i++ ) { - str += ", Result::" + createSuccessCode(commandData.successCodes[i], m_tags); + str += ", Result::" + createSuccessCode( commandData.successCodes[i], m_tags ); } str += " }"; } - if (unique) + if ( unique ) { str += ", deleter"; } @@ -1860,38 +2339,54 @@ void VulkanHppGenerator::appendFunctionBodyEnhancedReturnResultValue(std::string str += " );\n"; } -void VulkanHppGenerator::appendFunctionBodyEnhancedSingleStep(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool singular) const +void VulkanHppGenerator::appendFunctionBodyEnhancedSingleStep( std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool singular ) const { str += indentation + " "; - if (commandData.returnType == "VkResult") + if ( commandData.returnType == "VkResult" ) { str += "Result result = static_cast( "; } - else if (commandData.returnType != "void") + else if ( commandData.returnType != "void" ) { str += "return "; } - appendCall(str, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, false, true, singular); - if (commandData.returnType == "VkResult") + appendCall( str, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, false, true, singular ); + if ( commandData.returnType == "VkResult" ) { str += " )"; } str += ";\n"; } -void VulkanHppGenerator::appendFunctionBodyEnhancedTwoStep(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool singular, std::string const& returnName) const +void VulkanHppGenerator::appendFunctionBodyEnhancedTwoStep( std::string & str, + std::string const & indentation, + std::string const & name, + 
CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool singular, + std::string const & returnName ) const { - assert(!singular); - assert((commandData.returnType == "VkResult") || (commandData.returnType == "void")); - assert(returnParamIndex != INVALID_INDEX); + assert( !singular ); + assert( ( commandData.returnType == "VkResult" ) || ( commandData.returnType == "void" ) ); + assert( returnParamIndex != INVALID_INDEX ); // local count variable to hold the size of the vector to fill - std::map::const_iterator returnit = vectorParamIndices.find(returnParamIndex); - assert(returnit != vectorParamIndices.end() && (returnit->second != INVALID_INDEX)); + std::map::const_iterator returnit = vectorParamIndices.find( returnParamIndex ); + assert( returnit != vectorParamIndices.end() && ( returnit->second != INVALID_INDEX ) ); // take the pure type of the size parameter; strip the leading 'p' from its name for its local name - std::string sizeName = startLowerCase(stripPrefix(commandData.params[returnit->second].name, "p")); - str += indentation + " " + stripPrefix(commandData.params[returnit->second].type.type, "Vk") + " " + sizeName + ";\n"; + std::string sizeName = startLowerCase( stripPrefix( commandData.params[returnit->second].name, "p" ) ); + str += + indentation + " " + stripPrefix( commandData.params[returnit->second].type.type, "Vk" ) + " " + sizeName + ";\n"; std::string const multiSuccessTemplate = R"(${i} Result result; @@ -1923,23 +2418,31 @@ ${i} } ${i} ${returnName}.resize( ${sizeName} ); ${i} ${call2}; )"; - std::string const& selectedTemplate = (commandData.returnType == "VkResult") ? ((1 < commandData.successCodes.size()) ? multiSuccessTemplate : singleSuccessTemplate) : voidMultiCallTemplate; + std::string const & selectedTemplate = + ( commandData.returnType == "VkResult" ) + ? ( ( 1 < commandData.successCodes.size() ) ? 
multiSuccessTemplate : singleSuccessTemplate ) + : voidMultiCallTemplate; std::string call1, call2; - appendCall(call1, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, true, true, false); - appendCall(call2, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, true, false, false); + appendCall( call1, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, true, true, false ); + appendCall( call2, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, true, false, false ); - str += replaceWithMap(selectedTemplate, - { - { "sizeName", sizeName }, - { "returnName", returnName }, - { "call1", call1 }, - { "call2", call2 }, - { "i", indentation } - }); + str += replaceWithMap( selectedTemplate, + { { "sizeName", sizeName }, + { "returnName", returnName }, + { "call1", call1 }, + { "call2", call2 }, + { "i", indentation } } ); } -void VulkanHppGenerator::appendFunctionBodyEnhancedVectorOfStructureChain(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, std::map const& vectorParamIndices, bool withAllocator) const +void VulkanHppGenerator::appendFunctionBodyEnhancedVectorOfStructureChain( + std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + std::map const & vectorParamIndices, + bool withAllocator ) const { std::string const stringTemplate = R"(${i} std::vector ${returnName}${vectorAllocator}; @@ -1960,26 +2463,37 @@ ${i} return ${returnName}; )"; // local count variable to hold the size of the vector to fill - std::map::const_iterator returnit = vectorParamIndices.find(returnParamIndex); - assert(returnit != vectorParamIndices.end() && (returnit->second != INVALID_INDEX)); + std::map::const_iterator returnit = vectorParamIndices.find( returnParamIndex ); + assert( returnit != vectorParamIndices.end() && ( returnit->second != INVALID_INDEX ) ); - assert(m_commandToHandle.find(name)->second == commandData.params[0].type.type); // make sure, the first argument is the handle - assert(commandData.params.size() == 3); // make sure, there are three args: the handle, the pointer to size, and the data pointer + assert( m_commandToHandle.find( name )->second == + commandData.params[0].type.type ); // make sure, the first argument is the handle + assert( commandData.params.size() == + 3 ); // make sure, there are three args: the handle, the pointer to size, and the data pointer - str += replaceWithMap(stringTemplate, - { - { "commandName", name}, - { "handleName", startLowerCase(stripPrefix(commandData.params[0].type.type, "Vk")) }, - { "i", indentation }, - { "returnName", startLowerCase(stripPrefix(commandData.params[returnParamIndex].name, "p")) }, - { "returnType", stripPrefix(commandData.params[returnParamIndex].type.type, "Vk")}, - { "sizeName", startLowerCase(stripPrefix(commandData.params[returnit->second].name, "p"))}, - { "vectorAllocator", withAllocator ? 
"( vectorAllocator )" : "" }, - { "VkReturnType", commandData.params[returnParamIndex].type.type} - }); + str += + replaceWithMap( stringTemplate, + { { "commandName", name }, + { "handleName", startLowerCase( stripPrefix( commandData.params[0].type.type, "Vk" ) ) }, + { "i", indentation }, + { "returnName", startLowerCase( stripPrefix( commandData.params[returnParamIndex].name, "p" ) ) }, + { "returnType", stripPrefix( commandData.params[returnParamIndex].type.type, "Vk" ) }, + { "sizeName", startLowerCase( stripPrefix( commandData.params[returnit->second].name, "p" ) ) }, + { "vectorAllocator", withAllocator ? "( vectorAllocator )" : "" }, + { "VkReturnType", commandData.params[returnParamIndex].type.type } } ); } -void VulkanHppGenerator::appendFunctionBodyEnhancedVectorOfUniqueHandles(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool twoStep, bool singular, bool withAllocator) const +void VulkanHppGenerator::appendFunctionBodyEnhancedVectorOfUniqueHandles( + std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool twoStep, + bool singular, + bool withAllocator ) const { std::string const stringTemplate = R"(${i} std::vector, Allocator> ${uniqueTypeVariable}s${allocator}; @@ -1998,85 +2512,114 @@ ${i} } ${i} return createResultValue( result, ${uniqueTypeVariable}s, VULKAN_HPP_NAMESPACE_STRING "::${class}::${commandName}Unique"${successCodes} ); )"; - std::string type = (returnParamIndex != INVALID_INDEX) ? commandData.params[returnParamIndex].type.type : ""; - std::string typeVariable = startLowerCase(stripPrefix(type, "Vk")); - std::string uniqueTypeVariable = "unique" + stripPrefix(type, "Vk"); + std::string type = ( returnParamIndex != INVALID_INDEX ) ? 
commandData.params[returnParamIndex].type.type : ""; + std::string typeVariable = startLowerCase( stripPrefix( type, "Vk" ) ); + std::string uniqueTypeVariable = "unique" + stripPrefix( type, "Vk" ); std::string arguments; - appendArguments(arguments, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, twoStep, true, singular, 1, commandData.params.size() - 1); + appendArguments( arguments, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + twoStep, + true, + singular, + 1, + commandData.params.size() - 1 ); - assert(m_commandToHandle.find(name) != m_commandToHandle.end()); - std::string const& handle = m_commandToHandle.find(name)->second; + assert( m_commandToHandle.find( name ) != m_commandToHandle.end() ); + std::string const & handle = m_commandToHandle.find( name )->second; - auto handleIt = m_handles.find(type); - assert(handleIt != m_handles.end()); + auto handleIt = m_handles.find( type ); + assert( handleIt != m_handles.end() ); - assert(!commandData.successCodes.empty()); - std::string successChecks = "result == VULKAN_HPP_NAMESPACE::Result::" + createSuccessCode(commandData.successCodes[0], m_tags); + assert( !commandData.successCodes.empty() ); + std::string successChecks = + "result == VULKAN_HPP_NAMESPACE::Result::" + createSuccessCode( commandData.successCodes[0], m_tags ); std::string successCodes; - if (1 < commandData.successCodes.size()) + if ( 1 < commandData.successCodes.size() ) { successChecks = "( " + successChecks + " )"; - successCodes = ", { VULKAN_HPP_NAMESPACE::Result::" + createSuccessCode(commandData.successCodes[0], m_tags); - for (size_t i = 1; i < commandData.successCodes.size(); i++) + successCodes = ", { VULKAN_HPP_NAMESPACE::Result::" + createSuccessCode( commandData.successCodes[0], m_tags ); + for ( size_t i = 1; i < commandData.successCodes.size(); i++ ) { - successChecks += " || ( result == VULKAN_HPP_NAMESPACE::Result::" + createSuccessCode(commandData.successCodes[i], m_tags) + " )"; - successCodes += ", VULKAN_HPP_NAMESPACE::Result::" + createSuccessCode(commandData.successCodes[i], m_tags); + successChecks += + " || ( result == VULKAN_HPP_NAMESPACE::Result::" + createSuccessCode( commandData.successCodes[i], m_tags ) + + " )"; + successCodes += ", VULKAN_HPP_NAMESPACE::Result::" + createSuccessCode( commandData.successCodes[i], m_tags ); } successCodes += " }"; } - std::string commandName = determineCommandName(name, commandData.params[0].type.type); - bool isCreateFunction = (name.substr(2, 6) == "Create"); - str += replaceWithMap(stringTemplate, std::map - { - { "allocator", withAllocator ? "( vectorAllocator )" : "" }, - { "arguments", arguments }, - { "class", stripPrefix(handle, "Vk") }, - { "command", stripPrefix(name, "vk") }, - { "commandName", commandName }, - { "Deleter", handleIt->second.deletePool.empty() ? "ObjectDestroy" : "PoolFree" }, - { "deleterArg", handleIt->second.deletePool.empty() ? "allocator" : "allocateInfo." + startLowerCase(stripPrefix(handleIt->second.deletePool, "Vk")) }, - { "DeleterTemplate", stripPrefix(handle, "Vk") + (handleIt->second.deletePool.empty() ? "" : "," + stripPrefix(handleIt->second.deletePool, "Vk")) }, - { "i", indentation }, - { "successChecks", successChecks }, - { "successCodes", successCodes }, - { "type", stripPrefix(type, "Vk") }, - { "typeVariable", typeVariable }, - { "uniqueTypeVariable", uniqueTypeVariable }, - { "vectorSize", isCreateFunction ? "createInfos.size()" : "allocateInfo." 
+ typeVariable + "Count" } - }); + std::string commandName = determineCommandName( name, commandData.params[0].type.type ); + bool isCreateFunction = ( name.substr( 2, 6 ) == "Create" ); + str += replaceWithMap( + stringTemplate, + std::map{ + { "allocator", withAllocator ? "( vectorAllocator )" : "" }, + { "arguments", arguments }, + { "class", stripPrefix( handle, "Vk" ) }, + { "command", stripPrefix( name, "vk" ) }, + { "commandName", commandName }, + { "Deleter", handleIt->second.deletePool.empty() ? "ObjectDestroy" : "PoolFree" }, + { "deleterArg", + handleIt->second.deletePool.empty() + ? "allocator" + : "allocateInfo." + startLowerCase( stripPrefix( handleIt->second.deletePool, "Vk" ) ) }, + { "DeleterTemplate", + stripPrefix( handle, "Vk" ) + + ( handleIt->second.deletePool.empty() ? "" : "," + stripPrefix( handleIt->second.deletePool, "Vk" ) ) }, + { "i", indentation }, + { "successChecks", successChecks }, + { "successCodes", successCodes }, + { "type", stripPrefix( type, "Vk" ) }, + { "typeVariable", typeVariable }, + { "uniqueTypeVariable", uniqueTypeVariable }, + { "vectorSize", isCreateFunction ? "createInfos.size()" : "allocateInfo." + typeVariable + "Count" } } ); } -void VulkanHppGenerator::appendFunctionBodyStandard(std::string & str, std::string const& indentation, std::string const& commandName, CommandData const& commandData) const +void VulkanHppGenerator::appendFunctionBodyStandard( std::string & str, + std::string const & indentation, + std::string const & commandName, + CommandData const & commandData ) const { - std::pair returnData = generateFunctionBodyStandardReturn(commandData.returnType); + std::pair returnData = generateFunctionBodyStandardReturn( commandData.returnType ); - assert(m_commandToHandle.find(commandName) != m_commandToHandle.end()); - std::string const& handle = m_commandToHandle.find(commandName)->second; - assert(handle.empty() || (handle == commandData.params[0].type.type)); + assert( m_commandToHandle.find( commandName ) != m_commandToHandle.end() ); + std::string const & handle = m_commandToHandle.find( commandName )->second; + assert( handle.empty() || ( handle == commandData.params[0].type.type ) ); - str += indentation + " " + returnData.second + "d." + commandName + "( " + (handle.empty() ? "" : ("m_" + startLowerCase(stripPrefix(handle, "Vk")))); - for (size_t i = handle.empty() ? 0 : 1; i < commandData.params.size(); i++) + str += indentation + " " + returnData.second + "d." + commandName + "( " + + ( handle.empty() ? "" : ( "m_" + startLowerCase( stripPrefix( handle, "Vk" ) ) ) ); + for ( size_t i = handle.empty() ? 0 : 1; i < commandData.params.size(); i++ ) { - if (0 < i) + if ( 0 < i ) { str += ", "; } - appendFunctionBodyStandardArgument(str, commandData.params[i].type, commandData.params[i].name, commandData.params[i].arraySizes); + appendFunctionBodyStandardArgument( + str, commandData.params[i].type, commandData.params[i].name, commandData.params[i].arraySizes ); } - str += std::string(" )") + (returnData.first ? " )" : "") + ";\n"; + str += std::string( " )" ) + ( returnData.first ? 
" )" : "" ) + ";\n"; } -void VulkanHppGenerator::appendFunctionBodyStandardArgument(std::string & str, TypeData const& typeData, std::string const& name, std::vector const& arraySizes) const +void VulkanHppGenerator::appendFunctionBodyStandardArgument( std::string & str, + TypeData const & typeData, + std::string const & name, + std::vector const & arraySizes ) const { - if (beginsWith(typeData.type, "Vk")) + if ( beginsWith( typeData.type, "Vk" ) ) { // the parameter is a vulkan type - if (!typeData.postfix.empty() || !arraySizes.empty()) + if ( !typeData.postfix.empty() || !arraySizes.empty() ) { - assert((typeData.postfix.empty() || (typeData.postfix.back() == '*')) && (arraySizes.empty() || (arraySizes.size() == 1))); + assert( ( typeData.postfix.empty() || ( typeData.postfix.back() == '*' ) ) && + ( arraySizes.empty() || ( arraySizes.size() == 1 ) ) ); // it's a pointer -> need to reinterpret_cast it - appendReinterpretCast(str, typeData.prefix.find("const") == 0, typeData.type, typeData.postfix.find("* const") != std::string::npos); + appendReinterpretCast( str, + typeData.prefix.find( "const" ) == 0, + typeData.type, + typeData.postfix.find( "* const" ) != std::string::npos ); } else { @@ -2092,64 +2635,87 @@ void VulkanHppGenerator::appendFunctionBodyStandardArgument(std::string & str, T } } -bool VulkanHppGenerator::appendFunctionHeaderArgumentEnhanced(std::string & str, ParamData const& param, size_t paramIndex, std::map const& vectorParamIndices, bool skip, bool argEncountered, bool isTemplateParam, bool isLastArgument, bool singular, bool withDefaults, bool withAllocator) const +bool VulkanHppGenerator::appendFunctionHeaderArgumentEnhanced( std::string & str, + ParamData const & param, + size_t paramIndex, + std::map const & vectorParamIndices, + bool skip, + bool argEncountered, + bool isTemplateParam, + bool isLastArgument, + bool singular, + bool withDefaults, + bool withAllocator ) const { - if (!skip) + if ( !skip ) { - if (argEncountered) + if ( argEncountered ) { str += ", "; } - std::string strippedParameterName = startLowerCase(stripPrefix(param.name, "p")); + std::string strippedParameterName = startLowerCase( stripPrefix( param.name, "p" ) ); - std::map::const_iterator it = vectorParamIndices.find(paramIndex); - if (it == vectorParamIndices.end()) + std::map::const_iterator it = vectorParamIndices.find( paramIndex ); + if ( it == vectorParamIndices.end() ) { // the argument ist not a vector - if (param.type.postfix.empty()) + if ( param.type.postfix.empty() ) { // and its not a pointer -> just use its type and name here - appendFunctionHeaderArgumentEnhancedSimple(str, param, isLastArgument, withDefaults, withAllocator); + appendFunctionHeaderArgumentEnhancedSimple( str, param, isLastArgument, withDefaults, withAllocator ); } else { // the argument is not a vector, but a pointer - assert(param.type.postfix.back() == '*'); - appendFunctionHeaderArgumentEnhancedPointer(str, param, strippedParameterName, withDefaults, withAllocator); + assert( param.type.postfix.back() == '*' ); + appendFunctionHeaderArgumentEnhancedPointer( str, param, strippedParameterName, withDefaults, withAllocator ); } } else { // the argument is a vector - appendFunctionHeaderArgumentEnhancedVector(str, param, strippedParameterName, it->second != INVALID_INDEX, isTemplateParam, singular, withDefaults, withAllocator); + appendFunctionHeaderArgumentEnhancedVector( str, + param, + strippedParameterName, + it->second != INVALID_INDEX, + isTemplateParam, + singular, + withDefaults, + withAllocator ); 
} argEncountered = true; } return argEncountered; } -void VulkanHppGenerator::appendFunctionHeaderArgumentEnhancedPointer(std::string & str, ParamData const& param, std::string const& strippedParameterName, bool withDefaults, bool withAllocator) const +void VulkanHppGenerator::appendFunctionHeaderArgumentEnhancedPointer( std::string & str, + ParamData const & param, + std::string const & strippedParameterName, + bool withDefaults, + bool withAllocator ) const { - assert(param.type.postfix.back() == '*'); - if (param.optional) + assert( param.type.postfix.back() == '*' ); + if ( param.optional ) { // for an optional argument, trim the leading 'p' from the name - str += "Optional<" + param.type.prefix + (param.type.prefix.empty() ? "" : " ") + stripPrefix(param.type.type, "Vk") + "> " + strippedParameterName; - if (withDefaults && !withAllocator) + str += "Optional<" + param.type.prefix + ( param.type.prefix.empty() ? "" : " " ) + + stripPrefix( param.type.type, "Vk" ) + "> " + strippedParameterName; + if ( withDefaults && !withAllocator ) { str += " = nullptr"; } } - else if (param.type.type == "void") + else if ( param.type.type == "void" ) { // for void-pointer, just use type and name str += param.type.compose() + " " + param.name; } - else if (param.type.type != "char") + else if ( param.type.type != "char" ) { // for non-char-pointer, change to reference - assert(param.type.postfix == "*"); - str += param.type.prefix + (param.type.prefix.empty() ? "" : " ") + stripPrefix(param.type.type, "Vk") + " & " + strippedParameterName; + assert( param.type.postfix == "*" ); + str += param.type.prefix + ( param.type.prefix.empty() ? "" : " " ) + stripPrefix( param.type.type, "Vk" ) + " & " + + strippedParameterName; } else { @@ -2158,42 +2724,50 @@ void VulkanHppGenerator::appendFunctionHeaderArgumentEnhancedPointer(std::string } } -void VulkanHppGenerator::appendFunctionHeaderArgumentEnhancedSimple(std::string & str, ParamData const& param, bool lastArgument, bool withDefaults, bool withAllocator) const +void VulkanHppGenerator::appendFunctionHeaderArgumentEnhancedSimple( + std::string & str, ParamData const & param, bool lastArgument, bool withDefaults, bool withAllocator ) const { - str += param.type.compose() + " " + param.name + constructCArraySizes(param.arraySizes); + str += param.type.compose() + " " + param.name + constructCArraySizes( param.arraySizes ); - if (withDefaults && lastArgument && !withAllocator) + if ( withDefaults && lastArgument && !withAllocator ) { // check if the very last argument is a flag without any bits -> provide some empty default for it - std::map::const_iterator bitmasksIt = m_bitmasks.find(param.type.type); - if (bitmasksIt != m_bitmasks.end()) + std::map::const_iterator bitmasksIt = m_bitmasks.find( param.type.type ); + if ( bitmasksIt != m_bitmasks.end() ) { // get the enum corresponding to this flag, to check if it's empty - std::string strippedBitmaskName = stripPrefix(bitmasksIt->first, "Vk"); - std::map::const_iterator enumIt = m_enums.find(bitmasksIt->second.requirements); - assert((enumIt == m_enums.end()) || (enumIt->second.isBitmask)); - if ((enumIt == m_enums.end()) || (enumIt->second.values.empty())) + std::string strippedBitmaskName = stripPrefix( bitmasksIt->first, "Vk" ); + std::map::const_iterator enumIt = m_enums.find( bitmasksIt->second.requirements ); + assert( ( enumIt == m_enums.end() ) || ( enumIt->second.isBitmask ) ); + if ( ( enumIt == m_enums.end() ) || ( enumIt->second.values.empty() ) ) { // there are no bits in this flag -> provide 
the default - str += " = " + stripPrefix(param.type.type, "Vk") + "()"; + str += " = " + stripPrefix( param.type.type, "Vk" ) + "()"; } } } } -void VulkanHppGenerator::appendFunctionHeaderArgumentEnhancedVector(std::string & str, ParamData const& param, std::string const& strippedParameterName, bool hasSizeParam, bool isTemplateParam, bool singular, bool withDefaults, bool withAllocator) const +void VulkanHppGenerator::appendFunctionHeaderArgumentEnhancedVector( std::string & str, + ParamData const & param, + std::string const & strippedParameterName, + bool hasSizeParam, + bool isTemplateParam, + bool singular, + bool withDefaults, + bool withAllocator ) const { - assert(param.type.postfix.back() == '*'); + assert( param.type.postfix.back() == '*' ); // it's optional, if it's marked as optional and there's no size specified bool optional = param.optional && !hasSizeParam; - if (param.type.type.find("char") != std::string::npos) + if ( param.type.type.find( "char" ) != std::string::npos ) { // it's a char-vector -> use a std::string (either optional or a const-reference - if (optional) + if ( optional ) { str += "Optional " + strippedParameterName; - if (withDefaults && !withAllocator) + if ( withDefaults && !withAllocator ) { str += " = nullptr"; } @@ -2206,55 +2780,83 @@ void VulkanHppGenerator::appendFunctionHeaderArgumentEnhancedVector(std::string else { // it's a non-char vector (they are never optional) - assert(!optional); - if (singular) + assert( !optional ); + if ( singular ) { // in singular case, change from pointer to reference - str += param.type.prefix + (param.type.prefix.empty() ? "" : " ") + stripPrefix(param.type.type, "Vk") + " & " + stripPluralS(strippedParameterName); + str += param.type.prefix + ( param.type.prefix.empty() ? "" : " " ) + stripPrefix( param.type.type, "Vk" ) + + " & " + stripPluralS( strippedParameterName ); } else { // otherwise, use our ArrayProxy - bool isConst = (param.type.prefix.find("const") != std::string::npos); - str += "ArrayProxy<" + (isTemplateParam ? (isConst ? "const T" : "T") : stripPostfix(param.type.compose(), "*")) + "> " + strippedParameterName; + bool isConst = ( param.type.prefix.find( "const" ) != std::string::npos ); + str += "ArrayProxy<" + + ( isTemplateParam ? ( isConst ? 
"const T" : "T" ) : stripPostfix( param.type.compose(), "*" ) ) + "> " + + strippedParameterName; } } } -void VulkanHppGenerator::appendFunctionHeaderArguments(std::string & str, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool enhanced, bool singular, bool withDefaults, bool withAllocator) const +void VulkanHppGenerator::appendFunctionHeaderArguments( std::string & str, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool enhanced, + bool singular, + bool withDefaults, + bool withAllocator ) const { str += "("; - if (enhanced) + if ( enhanced ) { - appendFunctionHeaderArgumentsEnhanced(str, name, commandData, returnParamIndex, templateParamIndex, vectorParamIndices, singular, withDefaults, withAllocator); + appendFunctionHeaderArgumentsEnhanced( str, + name, + commandData, + returnParamIndex, + templateParamIndex, + vectorParamIndices, + singular, + withDefaults, + withAllocator ); } else { - appendFunctionHeaderArgumentsStandard(str, name, commandData, withDefaults); + appendFunctionHeaderArgumentsStandard( str, name, commandData, withDefaults ); } str += ")"; - assert(m_commandToHandle.find(name) != m_commandToHandle.end()); - if (!m_commandToHandle.find(name)->second.empty()) + assert( m_commandToHandle.find( name ) != m_commandToHandle.end() ); + if ( !m_commandToHandle.find( name )->second.empty() ) { str += " const"; } } -void VulkanHppGenerator::appendFunctionHeaderArgumentsEnhanced(std::string & str, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool singular, bool withDefaults, bool withAllocator) const +void VulkanHppGenerator::appendFunctionHeaderArgumentsEnhanced( std::string & str, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool singular, + bool withDefaults, + bool withAllocator ) const { - assert(m_commandToHandle.find(name) != m_commandToHandle.end()); - std::string const& handle = m_commandToHandle.find(name)->second; + assert( m_commandToHandle.find( name ) != m_commandToHandle.end() ); + std::string const & handle = m_commandToHandle.find( name )->second; // check if there's at least one argument left to put in here - std::set skippedParams = determineSkippedParams(returnParamIndex, vectorParamIndices); - if (skippedParams.size() + (handle.empty() ? 0 : 1) < commandData.params.size()) + std::set skippedParams = determineSkippedParams( returnParamIndex, vectorParamIndices ); + if ( skippedParams.size() + ( handle.empty() ? 0 : 1 ) < commandData.params.size() ) { // determine the last argument, where we might provide some default for size_t lastArgument = INVALID_INDEX; - for (size_t i = commandData.params.size() - 1; i < commandData.params.size(); i--) + for ( size_t i = commandData.params.size() - 1; i < commandData.params.size(); i-- ) { - if (skippedParams.find(i) == skippedParams.end()) + if ( skippedParams.find( i ) == skippedParams.end() ) { lastArgument = i; break; @@ -2263,262 +2865,320 @@ void VulkanHppGenerator::appendFunctionHeaderArgumentsEnhanced(std::string & str str += " "; bool argEncountered = false; - for (size_t i = handle.empty() ? 0 : 1; i < commandData.params.size(); i++) + for ( size_t i = handle.empty() ? 
0 : 1; i < commandData.params.size(); i++ ) { - argEncountered = appendFunctionHeaderArgumentEnhanced(str, commandData.params[i], i, vectorParamIndices, skippedParams.find(i) != skippedParams.end(), argEncountered, - (templateParamIndex == i), (lastArgument == i), singular, withDefaults, withAllocator); + argEncountered = appendFunctionHeaderArgumentEnhanced( str, + commandData.params[i], + i, + vectorParamIndices, + skippedParams.find( i ) != skippedParams.end(), + argEncountered, + ( templateParamIndex == i ), + ( lastArgument == i ), + singular, + withDefaults, + withAllocator ); } - if (argEncountered) + if ( argEncountered ) { str += ", "; } } - if (withAllocator) + if ( withAllocator ) { str += "Allocator const& vectorAllocator, "; } str += "Dispatch const &d"; - if (withDefaults && !withAllocator) + if ( withDefaults && !withAllocator ) { str += " = VULKAN_HPP_DEFAULT_DISPATCHER"; } str += " "; } -void VulkanHppGenerator::appendFunctionHeaderArgumentsStandard(std::string & str, std::string const& name, CommandData const& commandData, bool withDefaults) const +void VulkanHppGenerator::appendFunctionHeaderArgumentsStandard( std::string & str, + std::string const & name, + CommandData const & commandData, + bool withDefaults ) const { // for the standard case, just list all the arguments as we've got them // determine the last argument, where we might provide some default for size_t lastArgument = commandData.params.size() - 1; - assert(m_commandToHandle.find(name) != m_commandToHandle.end()); - std::string const& handle = m_commandToHandle.find(name)->second; + assert( m_commandToHandle.find( name ) != m_commandToHandle.end() ); + std::string const & handle = m_commandToHandle.find( name )->second; bool argEncountered = false; - for (size_t i = handle.empty() ? 0 : 1; i < commandData.params.size(); i++) + for ( size_t i = handle.empty() ? 
0 : 1; i < commandData.params.size(); i++ ) { - argEncountered = appendFunctionHeaderArgumentStandard(str, commandData.params[i], argEncountered, lastArgument == i, withDefaults); + argEncountered = appendFunctionHeaderArgumentStandard( + str, commandData.params[i], argEncountered, lastArgument == i, withDefaults ); } - if (argEncountered) + if ( argEncountered ) { str += ", "; } str += "Dispatch const &d"; - if (withDefaults) + if ( withDefaults ) { str += " = VULKAN_HPP_DEFAULT_DISPATCHER "; } } -bool VulkanHppGenerator::appendFunctionHeaderArgumentStandard(std::string & str, ParamData const& param, bool argEncountered, bool isLastArgument, bool withDefaults) const +bool VulkanHppGenerator::appendFunctionHeaderArgumentStandard( + std::string & str, ParamData const & param, bool argEncountered, bool isLastArgument, bool withDefaults ) const { - if (argEncountered) + if ( argEncountered ) { str += ","; } - str += " " + param.type.compose() + " " + param.name + constructCArraySizes(param.arraySizes); + str += " " + param.type.compose() + " " + param.name + constructCArraySizes( param.arraySizes ); - if (withDefaults && isLastArgument) + if ( withDefaults && isLastArgument ) { // check if the very last argument is a flag without any bits -> provide some empty default for it - std::map::const_iterator bitmasksIt = m_bitmasks.find(param.type.type); - if (bitmasksIt != m_bitmasks.end()) + std::map::const_iterator bitmasksIt = m_bitmasks.find( param.type.type ); + if ( bitmasksIt != m_bitmasks.end() ) { // get the enum corresponding to this flag, to check if it's empty - std::string strippedBitmaskName = stripPrefix(bitmasksIt->first, "Vk"); - std::map::const_iterator enumIt = m_enums.find(bitmasksIt->second.requirements); - assert((enumIt == m_enums.end()) || (enumIt->second.isBitmask)); - if ((enumIt == m_enums.end()) || (enumIt->second.values.empty())) + std::string strippedBitmaskName = stripPrefix( bitmasksIt->first, "Vk" ); + std::map::const_iterator enumIt = m_enums.find( bitmasksIt->second.requirements ); + assert( ( enumIt == m_enums.end() ) || ( enumIt->second.isBitmask ) ); + if ( ( enumIt == m_enums.end() ) || ( enumIt->second.values.empty() ) ) { // there are no bits in this flag -> provide the default - str += " = " + stripPrefix(param.type.type, "Vk") + "()"; + str += " = " + stripPrefix( param.type.type, "Vk" ) + "()"; } } } return true; } -void VulkanHppGenerator::appendFunctionHeaderReturnType(std::string & str, CommandData const& commandData, size_t returnParamIndex, std::map const& vectorParamIndices, std::string const& enhancedReturnType, bool enhanced, bool twoStep, bool singular, bool unique, bool isStructureChain) const +void VulkanHppGenerator::appendFunctionHeaderReturnType( std::string & str, + CommandData const & commandData, + size_t returnParamIndex, + std::map const & vectorParamIndices, + std::string const & enhancedReturnType, + bool enhanced, + bool twoStep, + bool singular, + bool unique, + bool isStructureChain ) const { - if (enhanced) + if ( enhanced ) { - bool useTypename = ((commandData.successCodes.size() == 1) || ((commandData.successCodes.size() == 2) && (commandData.successCodes[1] == "VK_INCOMPLETE") && twoStep)); + bool useTypename = + ( ( commandData.successCodes.size() == 1 ) || + ( ( commandData.successCodes.size() == 2 ) && ( commandData.successCodes[1] == "VK_INCOMPLETE" ) && twoStep ) ); // the enhanced function might return some pretty complex return stuff - bool isVector = (enhancedReturnType.find("Allocator") != std::string::npos); - if 
(unique) + bool isVector = ( enhancedReturnType.find( "Allocator" ) != std::string::npos ); + if ( unique ) { // the unique version returns something prefixed with 'Unique'; potentially a vector of that stuff // it's a vector, if it's not the singular version and the return parameter is a vector parameter - bool returnsVector = !singular && (vectorParamIndices.find(returnParamIndex) != vectorParamIndices.end()); + bool returnsVector = !singular && ( vectorParamIndices.find( returnParamIndex ) != vectorParamIndices.end() ); - std::string returnType = isStructureChain ? "StructureChain" : stripPrefix(commandData.params[returnParamIndex].type.type, "Vk"); + std::string returnType = isStructureChain ? "StructureChain" + : stripPrefix( commandData.params[returnParamIndex].type.type, "Vk" ); str += useTypename ? "typename ResultValueType<" : "ResultValue<"; - str += returnsVector ? "std::vector,Allocator>>" : "UniqueHandle<" + returnType + ",Dispatch>>"; + str += returnsVector ? "std::vector,Allocator>>" + : "UniqueHandle<" + returnType + ",Dispatch>>"; str += useTypename ? "::type " : " "; } - else if ((enhancedReturnType != stripPrefix(commandData.returnType, "Vk")) && (commandData.returnType != "void")) + else if ( ( enhancedReturnType != stripPrefix( commandData.returnType, "Vk" ) ) && + ( commandData.returnType != "void" ) ) { - // if the enhanced return type differs from the original return type, and it's not void, we return a ResultValueType<...>::type - assert(commandData.returnType == "VkResult"); - // in singular case, we create the ResultValueType from the pure return type, otherwise from the enhanced return type - std::string returnType = isStructureChain ? "StructureChain" : (singular ? stripPrefix(commandData.params[returnParamIndex].type.type, "Vk") : enhancedReturnType); + // if the enhanced return type differs from the original return type, and it's not void, we return a + // ResultValueType<...>::type + assert( commandData.returnType == "VkResult" ); + // in singular case, we create the ResultValueType from the pure return type, otherwise from the enhanced return + // type + std::string returnType = + isStructureChain + ? "StructureChain" + : ( singular ? stripPrefix( commandData.params[returnParamIndex].type.type, "Vk" ) : enhancedReturnType ); // for the non-singular case with allocation, we need to prepend with 'typename' to keep compilers happy - str += (useTypename ? "typename ResultValueType<" : "ResultValue<") + returnType + ">" + (useTypename ? "::type " : " "); + str += ( useTypename ? "typename ResultValueType<" : "ResultValue<" ) + returnType + ">" + + ( useTypename ? "::type " : " " ); } - else if ((returnParamIndex != INVALID_INDEX) && (1 < commandData.successCodes.size())) + else if ( ( returnParamIndex != INVALID_INDEX ) && ( 1 < commandData.successCodes.size() ) ) { - // if there is a return parameter at all, and there are multiple success codes, we return a ResultValue<...> with the pure return type - assert(commandData.returnType == "VkResult"); - str += "ResultValue<" + (isStructureChain ? "StructureChain" : stripPrefix(commandData.params[returnParamIndex].type.type, "Vk")) + "> "; + // if there is a return parameter at all, and there are multiple success codes, we return a ResultValue<...> with + // the pure return type + assert( commandData.returnType == "VkResult" ); + str += "ResultValue<" + + ( isStructureChain ? 
"StructureChain" + : stripPrefix( commandData.params[returnParamIndex].type.type, "Vk" ) ) + + "> "; } else { // and in every other case, we just return the enhanced return type. - str += (isStructureChain && !isVector ? "StructureChain" : enhancedReturnType) + " "; + str += ( isStructureChain && !isVector ? "StructureChain" : enhancedReturnType ) + " "; } } else { // the non-enhanced function just uses the return type - str += stripPrefix(commandData.returnType, "Vk") + " "; + str += stripPrefix( commandData.returnType, "Vk" ) + " "; } } -void VulkanHppGenerator::appendFunctionHeaderTemplate(std::string & str, std::string const& indentation, size_t returnParamIndex, size_t templateParamIndex, std::string const& enhancedReturnType, bool enhanced, bool singular, bool unique, bool withDefault, bool isStructureChain) const +void VulkanHppGenerator::appendFunctionHeaderTemplate( std::string & str, + std::string const & indentation, + size_t returnParamIndex, + size_t templateParamIndex, + std::string const & enhancedReturnType, + bool enhanced, + bool singular, + bool unique, + bool withDefault, + bool isStructureChain ) const { - bool withAllocator = (enhancedReturnType.find("Allocator") != std::string::npos); + bool withAllocator = ( enhancedReturnType.find( "Allocator" ) != std::string::npos ); str += indentation + "template<"; - if (enhanced) + if ( enhanced ) { - if (isStructureChain) + if ( isStructureChain ) { - str += std::string("typename ") + (withAllocator ? "StructureChain" : "X, typename Y, typename ...Z") + ", "; + str += std::string( "typename " ) + ( withAllocator ? "StructureChain" : "X, typename Y, typename ...Z" ) + ", "; } - else if ((templateParamIndex != INVALID_INDEX) && ((templateParamIndex != returnParamIndex) || (enhancedReturnType == "Result"))) + else if ( ( templateParamIndex != INVALID_INDEX ) && + ( ( templateParamIndex != returnParamIndex ) || ( enhancedReturnType == "Result" ) ) ) { - assert(!withAllocator); + assert( !withAllocator ); str += "typename T, "; } - if (!singular && withAllocator) + if ( !singular && withAllocator ) { // otherwise, if there's an Allocator used in the enhanced return type, we templatize on that Allocator - assert((enhancedReturnType.substr(0, 12) == "std::vector<") && (enhancedReturnType.find(',') != std::string::npos) && (12 < enhancedReturnType.find(','))); + assert( ( enhancedReturnType.substr( 0, 12 ) == "std::vector<" ) && + ( enhancedReturnType.find( ',' ) != std::string::npos ) && ( 12 < enhancedReturnType.find( ',' ) ) ); str += "typename Allocator"; - if (withDefault) + if ( withDefault ) { - // for the default type get the type from the enhancedReturnType, which is of the form 'std::vector' - assert(!isStructureChain || !unique); - str += " = std::allocator<" + (isStructureChain ? "StructureChain" : (unique ? "Unique" : "") + enhancedReturnType.substr(12, enhancedReturnType.find(',') - 12)) + ">"; + // for the default type get the type from the enhancedReturnType, which is of the form + // 'std::vector' + assert( !isStructureChain || !unique ); + str += " = std::allocator<" + + ( isStructureChain ? "StructureChain" + : ( unique ? "Unique" : "" ) + + enhancedReturnType.substr( 12, enhancedReturnType.find( ',' ) - 12 ) ) + + ">"; } str += ", "; } } - str += std::string("typename Dispatch") + (withDefault ? " = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE" : "") + ">\n"; + str += std::string( "typename Dispatch" ) + ( withDefault ? 
" = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE" : "" ) + ">\n"; } -void VulkanHppGenerator::appendHandle(std::string & str, std::pair const& handleData, std::set & listedHandles) const +void VulkanHppGenerator::appendHandle( std::string & str, + std::pair const & handleData, + std::set & listedHandles ) const { - if (listedHandles.find(handleData.first) == listedHandles.end()) + if ( listedHandles.find( handleData.first ) == listedHandles.end() ) { - listedHandles.insert(handleData.first); + listedHandles.insert( handleData.first ); // first check for any handle that needs to be listed before this one - for (auto const& command : handleData.second.commands) + for ( auto const & command : handleData.second.commands ) { - for (auto const& parameter : command.second.params) + for ( auto const & parameter : command.second.params ) { - std::string typeName = parameter.type.type; - auto handlesIt = m_handles.find(typeName); - if ((handlesIt != m_handles.end()) && (listedHandles.find(typeName) == listedHandles.end())) + std::string typeName = parameter.type.type; + auto handlesIt = m_handles.find( typeName ); + if ( ( handlesIt != m_handles.end() ) && ( listedHandles.find( typeName ) == listedHandles.end() ) ) { - appendHandle(str, *handlesIt, listedHandles); + appendHandle( str, *handlesIt, listedHandles ); } } } - if (handleData.first.empty()) + if ( handleData.first.empty() ) { - for (auto const& command : handleData.second.commands) + for ( auto const & command : handleData.second.commands ) { - if (command.first == "vkCreateInstance") + if ( command.first == "vkCreateInstance" ) { - // special handling for createInstance, as we need to explicitly place the forward declarations and the deleter classes here -#if !defined(NDEBUG) - auto handleIt = m_handles.find(""); - assert((handleIt != m_handles.end()) && (handleIt->second.childrenHandles.size() == 2)); - assert(handleIt->second.childrenHandles.find("VkInstance") != handleIt->second.childrenHandles.end()); + // special handling for createInstance, as we need to explicitly place the forward declarations and the + // deleter classes here +#if !defined( NDEBUG ) + auto handleIt = m_handles.find( "" ); + assert( ( handleIt != m_handles.end() ) && ( handleIt->second.childrenHandles.size() == 2 ) ); + assert( handleIt->second.childrenHandles.find( "VkInstance" ) != handleIt->second.childrenHandles.end() ); #endif - appendUniqueTypes(str, "", { "VkInstance" }); + appendUniqueTypes( str, "", { "VkInstance" } ); } str += "\n"; - appendPlatformEnter(str, !command.second.aliases.empty(), command.second.platform); - appendCommand(str, " ", command.first, command.second, false); - appendPlatformLeave(str, !command.second.aliases.empty(), command.second.platform); - for (auto const& alias : command.second.aliases) + appendPlatformEnter( str, !command.second.aliases.empty(), command.second.platform ); + appendCommand( str, " ", command.first, command.second, false ); + appendPlatformLeave( str, !command.second.aliases.empty(), command.second.platform ); + for ( auto const & alias : command.second.aliases ) { - appendCommand(str, " ", alias, command.second, false); + appendCommand( str, " ", alias, command.second, false ); } } } else { // then append any forward declaration of Deleters used by this handle - if (!handleData.second.childrenHandles.empty()) + if ( !handleData.second.childrenHandles.empty() ) { - appendUniqueTypes(str, handleData.first, handleData.second.childrenHandles); + appendUniqueTypes( str, handleData.first, handleData.second.childrenHandles ); 
} - else if (handleData.first == "VkPhysicalDevice") + else if ( handleData.first == "VkPhysicalDevice" ) { // special handling for class Device, as it's created from PhysicalDevice, but destroys itself - appendUniqueTypes(str, "", { "VkDevice" }); + appendUniqueTypes( str, "", { "VkDevice" } ); } std::string commands; // list all the commands that are mapped to members of this class - for (auto const& command : handleData.second.commands) + for ( auto const & command : handleData.second.commands ) { std::string enter, leave, commandString; - appendPlatformEnter(enter, !command.second.aliases.empty(), command.second.platform); - appendPlatformLeave(leave, !command.second.aliases.empty(), command.second.platform); + appendPlatformEnter( enter, !command.second.aliases.empty(), command.second.platform ); + appendPlatformLeave( leave, !command.second.aliases.empty(), command.second.platform ); commands += "\n" + enter; - std::string commandName = determineCommandName(command.first, command.second.params[0].type.type); - appendCommand(commands, " ", command.first, command.second, false); - for (auto const& alias : command.second.aliases) + std::string commandName = determineCommandName( command.first, command.second.params[0].type.type ); + appendCommand( commands, " ", command.first, command.second, false ); + for ( auto const & alias : command.second.aliases ) { - assert(enter.empty() && leave.empty()); + assert( enter.empty() && leave.empty() ); commands += "\n"; - appendCommand(commands, " ", alias, command.second, false); + appendCommand( commands, " ", alias, command.second, false ); } // special handling for destroy functions bool platformLeft = false; - if (((command.first.substr(2, 7) == "Destroy") && (commandName != "destroy")) || (command.first.substr(2, 4) == "Free")) + if ( ( ( command.first.substr( 2, 7 ) == "Destroy" ) && ( commandName != "destroy" ) ) || + ( command.first.substr( 2, 4 ) == "Free" ) ) { - assert(1 < command.second.params.size()); - auto handleIt = m_handles.find(command.second.params[1].type.type); - assert(handleIt != m_handles.end()); - if (!handleIt->second.alias.empty()) + assert( 1 < command.second.params.size() ); + auto handleIt = m_handles.find( command.second.params[1].type.type ); + assert( handleIt != m_handles.end() ); + if ( !handleIt->second.alias.empty() ) { commands += leave; platformLeft = true; } std::string destroyCommandString; - appendCommand(destroyCommandString, " ", command.first, command.second, false); - std::string shortenedName = (command.first.substr(2, 7) == "Destroy") ? "destroy" : "free"; - size_t pos = destroyCommandString.find(commandName); - while (pos != std::string::npos) + appendCommand( destroyCommandString, " ", command.first, command.second, false ); + std::string shortenedName = ( command.first.substr( 2, 7 ) == "Destroy" ) ? 
"destroy" : "free"; + size_t pos = destroyCommandString.find( commandName ); + while ( pos != std::string::npos ) { - destroyCommandString.replace(pos, commandName.length(), shortenedName); - pos = destroyCommandString.find(commandName, pos); + destroyCommandString.replace( pos, commandName.length(), shortenedName ); + pos = destroyCommandString.find( commandName, pos ); } commands += "\n" + destroyCommandString; } - if (!platformLeft) + if ( !platformLeft ) { commands += leave; } @@ -2606,88 +3266,90 @@ ${commands} )"; std::string enter, leave; - appendPlatformEnter(enter, !handleData.second.alias.empty(), handleData.second.platform); - appendPlatformLeave(leave, !handleData.second.alias.empty(), handleData.second.platform); - str += replaceWithMap(templateString, { - { "className", stripPrefix(handleData.first, "Vk") }, - { "commands", commands }, - { "enter", enter }, - { "memberName", startLowerCase(stripPrefix(handleData.first, "Vk")) } - }); + appendPlatformEnter( enter, !handleData.second.alias.empty(), handleData.second.platform ); + appendPlatformLeave( leave, !handleData.second.alias.empty(), handleData.second.platform ); + str += replaceWithMap( templateString, + { { "className", stripPrefix( handleData.first, "Vk" ) }, + { "commands", commands }, + { "enter", enter }, + { "memberName", startLowerCase( stripPrefix( handleData.first, "Vk" ) ) } } ); - if (!handleData.second.alias.empty()) + if ( !handleData.second.alias.empty() ) { - str += " using " + stripPrefix(handleData.second.alias, "Vk") + " = " + stripPrefix(handleData.first, "Vk") + ";\n"; + str += " using " + stripPrefix( handleData.second.alias, "Vk" ) + " = " + + stripPrefix( handleData.first, "Vk" ) + ";\n"; } str += leave; } } } -void VulkanHppGenerator::appendHandles(std::string & str) const +void VulkanHppGenerator::appendHandles( std::string & str ) const { std::set listedHandles; - for (auto const& handle : m_handles) + for ( auto const & handle : m_handles ) { - appendHandle(str, handle, listedHandles); + appendHandle( str, handle, listedHandles ); } } -void VulkanHppGenerator::appendHandlesCommandDefintions(std::string & str) const +void VulkanHppGenerator::appendHandlesCommandDefintions( std::string & str ) const { - for (auto const& handle : m_handles) + for ( auto const & handle : m_handles ) { // finally the commands, that are member functions of this handle - for (auto const& command : handle.second.commands) + for ( auto const & command : handle.second.commands ) { - std::string strippedName = startLowerCase(stripPrefix(command.first, "vk")); + std::string strippedName = startLowerCase( stripPrefix( command.first, "vk" ) ); str += "\n"; - appendPlatformEnter(str, !command.second.aliases.empty(), command.second.platform); - appendCommand(str, " ", command.first, command.second, true); - for (auto const& alias : command.second.aliases) + appendPlatformEnter( str, !command.second.aliases.empty(), command.second.platform ); + appendCommand( str, " ", command.first, command.second, true ); + for ( auto const & alias : command.second.aliases ) { str += "\n"; - appendCommand(str, " ", alias, command.second, true); + appendCommand( str, " ", alias, command.second, true ); } // special handling for destroy functions - bool platformLeft = false; - std::string commandName = determineCommandName(command.first, command.second.params[0].type.type); - if (((command.first.substr(2, 7) == "Destroy") && (commandName != "destroy")) || (command.first.substr(2, 4) == "Free")) + bool platformLeft = false; + std::string 
commandName = determineCommandName( command.first, command.second.params[0].type.type ); + if ( ( ( command.first.substr( 2, 7 ) == "Destroy" ) && ( commandName != "destroy" ) ) || + ( command.first.substr( 2, 4 ) == "Free" ) ) { - assert(1 < command.second.params.size()); - auto handleIt = m_handles.find(command.second.params[1].type.type); - assert(handleIt != m_handles.end()); - if (!handleIt->second.alias.empty()) + assert( 1 < command.second.params.size() ); + auto handleIt = m_handles.find( command.second.params[1].type.type ); + assert( handleIt != m_handles.end() ); + if ( !handleIt->second.alias.empty() ) { - appendPlatformLeave(str, !command.second.aliases.empty(), command.second.platform); + appendPlatformLeave( str, !command.second.aliases.empty(), command.second.platform ); platformLeft = true; } std::string destroyCommandString; - appendCommand(destroyCommandString, " ", command.first, command.second, true); - std::string shortenedName = (command.first.substr(2, 7) == "Destroy") ? "destroy" : "free"; - size_t pos = destroyCommandString.find(commandName); - while (pos != std::string::npos) + appendCommand( destroyCommandString, " ", command.first, command.second, true ); + std::string shortenedName = ( command.first.substr( 2, 7 ) == "Destroy" ) ? "destroy" : "free"; + size_t pos = destroyCommandString.find( commandName ); + while ( pos != std::string::npos ) { - destroyCommandString.replace(pos, commandName.length(), shortenedName); - pos = destroyCommandString.find(commandName, pos); + destroyCommandString.replace( pos, commandName.length(), shortenedName ); + pos = destroyCommandString.find( commandName, pos ); } str += "\n" + destroyCommandString; } - if (!platformLeft) + if ( !platformLeft ) { - appendPlatformLeave(str, !command.second.aliases.empty(), command.second.platform); + appendPlatformLeave( str, !command.second.aliases.empty(), command.second.platform ); } } } str += "\n"; } -void VulkanHppGenerator::appendHashStructures(std::string& str) const +void VulkanHppGenerator::appendHashStructures( std::string & str ) const { - str += "\n" + str += + "\n" "namespace std\n" "{\n"; @@ -2699,16 +3361,16 @@ void VulkanHppGenerator::appendHashStructures(std::string& str) const } }; )"; - for (auto handle : m_handles) + for ( auto handle : m_handles ) { - if (!handle.first.empty()) + if ( !handle.first.empty() ) { str += "\n"; - appendPlatformEnter(str, !handle.second.alias.empty(), handle.second.platform); - std::string type = stripPrefix(handle.first, "Vk"); - std::string name = startLowerCase(type); - str += replaceWithMap(hashTemplate, { {"name", name}, {"type", type} }); - appendPlatformLeave(str, !handle.second.alias.empty(), handle.second.platform); + appendPlatformEnter( str, !handle.second.alias.empty(), handle.second.platform ); + std::string type = stripPrefix( handle.first, "Vk" ); + std::string name = startLowerCase( type ); + str += replaceWithMap( hashTemplate, { { "name", name }, { "type", type } } ); + appendPlatformLeave( str, !handle.second.alias.empty(), handle.second.platform ); } } @@ -2716,7 +3378,7 @@ void VulkanHppGenerator::appendHashStructures(std::string& str) const } // Intended only for `enum class Result`! 
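The hashTemplate above is instantiated once per non-empty handle, wrapped in the same platform #ifdef guards as the handle itself. Roughly, and as an illustration rather than verbatim generator output, each instantiation expands to a std::hash specialization that forwards to the hash of the underlying C handle, which lets the C++ handle types serve as keys in unordered containers:

  #include <vulkan/vulkan.hpp>
  #include <unordered_set>

  // Approximate shape of one generated specialization (kept as a comment here because
  // vulkan.hpp already defines it):
  //
  //   namespace std
  //   {
  //     template <> struct hash<VULKAN_HPP_NAMESPACE::Fence>
  //     {
  //       std::size_t operator()( VULKAN_HPP_NAMESPACE::Fence const & fence ) const VULKAN_HPP_NOEXCEPT
  //       {
  //         return std::hash<VkFence>{}( static_cast<VkFence>( fence ) );
  //       }
  //     };
  //   }
  //
  std::unordered_set<vk::Fence> trackedFences;  // relies on the generated specialization
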
-void VulkanHppGenerator::appendResultExceptions(std::string & str) const +void VulkanHppGenerator::appendResultExceptions( std::string & str ) const { std::string templateString = R"( class ${className} : public SystemError @@ -2729,82 +3391,87 @@ void VulkanHppGenerator::appendResultExceptions(std::string & str) const }; )"; - auto enumData = m_enums.find("VkResult"); - for (auto const& value : enumData->second.values) + auto enumData = m_enums.find( "VkResult" ); + for ( auto const & value : enumData->second.values ) { - if (beginsWith(value.vkValue, "eError")) + if ( beginsWith( value.vkValue, "eError" ) ) { - str += replaceWithMap(templateString, - { - { "className", stripPrefix(value.vkValue, "eError") + "Error" }, - { "enumName", stripPrefix(enumData->first, "Vk") }, - { "enumMemberName", value.vkValue } - }); + str += replaceWithMap( templateString, + { { "className", stripPrefix( value.vkValue, "eError" ) + "Error" }, + { "enumName", stripPrefix( enumData->first, "Vk" ) }, + { "enumMemberName", value.vkValue } } ); } } str += "\n"; } -void VulkanHppGenerator::appendPlatformEnter(std::string & str, bool isAliased, std::string const& platform) const +void VulkanHppGenerator::appendPlatformEnter( std::string & str, bool isAliased, std::string const & platform ) const { - if (!isAliased && !platform.empty()) + if ( !isAliased && !platform.empty() ) { - auto it = m_platforms.find(platform); - assert((it != m_platforms.end()) && !it->second.empty()); + auto it = m_platforms.find( platform ); + assert( ( it != m_platforms.end() ) && !it->second.empty() ); str += "#ifdef " + it->second + "\n"; } } -void VulkanHppGenerator::appendPlatformLeave(std::string & str, bool isAliased, std::string const& platform) const +void VulkanHppGenerator::appendPlatformLeave( std::string & str, bool isAliased, std::string const & platform ) const { - if (!isAliased && !platform.empty()) + if ( !isAliased && !platform.empty() ) { - auto it = m_platforms.find(platform); - assert((it != m_platforms.end()) && !it->second.empty()); + auto it = m_platforms.find( platform ); + assert( ( it != m_platforms.end() ) && !it->second.empty() ); str += "#endif /*" + it->second + "*/\n"; } } -void VulkanHppGenerator::appendStruct(std::string & str, std::pair const& structure, std::set & listedStructures) const +void VulkanHppGenerator::appendStruct( std::string & str, + std::pair const & structure, + std::set & listedStructures ) const { - if (listedStructures.find(structure.first) == listedStructures.end()) + if ( listedStructures.find( structure.first ) == listedStructures.end() ) { - listedStructures.insert(structure.first); - for (auto const& member : structure.second.members) + listedStructures.insert( structure.first ); + for ( auto const & member : structure.second.members ) { - auto structureIt = m_structures.find(member.type.type); - if ((structureIt != m_structures.end()) && (listedStructures.find(member.type.type) == listedStructures.end())) + auto structureIt = m_structures.find( member.type.type ); + if ( ( structureIt != m_structures.end() ) && + ( listedStructures.find( member.type.type ) == listedStructures.end() ) ) { - appendStruct(str, *structureIt, listedStructures); + appendStruct( str, *structureIt, listedStructures ); } } - if (!structure.second.subStruct.empty()) + if ( !structure.second.subStruct.empty() ) { - auto structureIt = m_structures.find(structure.second.subStruct); - if ((structureIt != m_structures.end()) && (listedStructures.find(structureIt->first) == listedStructures.end())) + auto 
structureIt = m_structures.find( structure.second.subStruct ); + if ( ( structureIt != m_structures.end() ) && + ( listedStructures.find( structureIt->first ) == listedStructures.end() ) ) { - appendStruct(str, *structureIt, listedStructures); + appendStruct( str, *structureIt, listedStructures ); } } - if (structure.second.isUnion) + if ( structure.second.isUnion ) { - appendUnion(str, structure); + appendUnion( str, structure ); } else { - appendStructure(str, structure); + appendStructure( str, structure ); } } } -void VulkanHppGenerator::appendStructAssignmentOperator(std::string &str, std::pair const& structData, std::string const& prefix) const +void VulkanHppGenerator::appendStructAssignmentOperator( std::string & str, + std::pair const & structData, + std::string const & prefix ) const { // we need an assignment operator if there is constant sType in this struct std::string copyTemplate; - if ((nonConstSTypeStructs.find(structData.first) == nonConstSTypeStructs.end()) && !structData.second.members.empty() && (structData.second.members.front().name == "sType")) + if ( ( nonConstSTypeStructs.find( structData.first ) == nonConstSTypeStructs.end() ) && + !structData.second.members.empty() && ( structData.second.members.front().name == "sType" ) ) { - assert((2 <= structData.second.members.size()) && (structData.second.members[1].name == "pNext")); + assert( ( 2 <= structData.second.members.size() ) && ( structData.second.members[1].name == "pNext" ) ); static const std::string stringTemplate = R"( ${prefix}${structName} & operator=( ${structName} const & rhs ) VULKAN_HPP_NOEXCEPT @@ -2813,18 +3480,20 @@ ${prefix} memcpy( &pNext, &rhs.pNext, sizeof( ${structName} ) - offsetof( ${str ${prefix} return *this; ${prefix}} )"; - str += replaceWithMap(stringTemplate, { {"prefix", prefix }, { "structName", stripPrefix(structData.first, "Vk") } }); + str += replaceWithMap( stringTemplate, + { { "prefix", prefix }, { "structName", stripPrefix( structData.first, "Vk" ) } } ); } } -void VulkanHppGenerator::appendStructCompareOperators(std::string & str, std::pair const& structData) const +void VulkanHppGenerator::appendStructCompareOperators( std::string & str, + std::pair const & structData ) const { // two structs are compared by comparing each of the elements std::string compareMembers; std::string intro = ""; - for (size_t i = 0; i < structData.second.members.size(); i++) + for ( size_t i = 0; i < structData.second.members.size(); i++ ) { - MemberData const& member = structData.second.members[i]; + MemberData const & member = structData.second.members[i]; compareMembers += intro + "( " + member.name + " == rhs." + member.name + " )"; intro = "\n && "; } @@ -2845,63 +3514,73 @@ void VulkanHppGenerator::appendStructCompareOperators(std::string & str, std::pa #endif )"; - str += replaceWithMap(compareTemplate, { { "name", stripPrefix(structData.first, "Vk") }, { "compareMembers", compareMembers } }); + str += replaceWithMap( compareTemplate, + { { "name", stripPrefix( structData.first, "Vk" ) }, { "compareMembers", compareMembers } } ); } -std::string VulkanHppGenerator::constructConstexprString(std::pair const& structData) const +std::string + VulkanHppGenerator::constructConstexprString( std::pair const & structData ) const { // structs with a union (and VkBaseInStructure and VkBaseOutStructure) can't be a constexpr! - bool isConstExpression = !containsUnion(structData.first) && (structData.first != "VkBaseInStructure") && (structData.first != "VkBaseOutStructure"); - return isConstExpression ? 
(std::string("VULKAN_HPP_CONSTEXPR") + (containsArray(structData.first) ? "_14 " : " ")) : ""; + bool isConstExpression = !containsUnion( structData.first ) && ( structData.first != "VkBaseInStructure" ) && + ( structData.first != "VkBaseOutStructure" ); + return isConstExpression + ? ( std::string( "VULKAN_HPP_CONSTEXPR" ) + ( containsArray( structData.first ) ? "_14 " : " " ) ) + : ""; } -void VulkanHppGenerator::appendStructConstructor(std::string &str, std::pair const& structData, std::string const& prefix) const +void VulkanHppGenerator::appendStructConstructor( std::string & str, + std::pair const & structData, + std::string const & prefix ) const { // the constructor with all the elements as arguments, with defaults - std::string constexprString = constructConstexprString(structData); - std::string ctorOpening = prefix + constexprString + stripPrefix(structData.first, "Vk"); - std::string indentation(ctorOpening.size() + 2, ' '); + std::string constexprString = constructConstexprString( structData ); + std::string ctorOpening = prefix + constexprString + stripPrefix( structData.first, "Vk" ); + std::string indentation( ctorOpening.size() + 2, ' ' ); std::string arguments, initializers; - bool listedArgument = false; - bool firstArgument = true; - for (auto const& member : structData.second.members) + bool listedArgument = false; + bool firstArgument = true; + for ( auto const & member : structData.second.members ) { // gather the arguments - listedArgument = appendStructConstructorArgument(arguments, listedArgument, indentation, member); + listedArgument = appendStructConstructorArgument( arguments, listedArgument, indentation, member ); // gather the initializers; skip members 'pNext' and 'sType', they are directly set by initializers - if ((member.name != "pNext") && (member.name != "sType")) + if ( ( member.name != "pNext" ) && ( member.name != "sType" ) ) { - initializers += prefix + " " + (firstArgument ? ":" : ",") + " " + member.name + "( " + member.name + "_ )\n"; + initializers += prefix + " " + ( firstArgument ? ":" : "," ) + " " + member.name + "( " + member.name + "_ )\n"; firstArgument = false; } } - str += ctorOpening + (arguments.empty() ? "()" : std::string("( " + arguments + " )")) + " VULKAN_HPP_NOEXCEPT\n" + initializers - + prefix + "{}\n"; + str += ctorOpening + ( arguments.empty() ? "()" : std::string( "( " + arguments + " )" ) ) + + " VULKAN_HPP_NOEXCEPT\n" + initializers + prefix + "{}\n"; } -bool VulkanHppGenerator::appendStructConstructorArgument(std::string & str, bool listedArgument, std::string const& indentation, MemberData const& memberData) const +bool VulkanHppGenerator::appendStructConstructorArgument( std::string & str, + bool listedArgument, + std::string const & indentation, + MemberData const & memberData ) const { // skip members 'pNext' and 'sType', as they are never explicitly set - if ((memberData.name != "pNext") && (memberData.name != "sType")) + if ( ( memberData.name != "pNext" ) && ( memberData.name != "sType" ) ) { - str += (listedArgument ? (",\n" + indentation) : ""); - if (memberData.arraySizes.empty()) + str += ( listedArgument ? 
( ",\n" + indentation ) : "" ); + if ( memberData.arraySizes.empty() ) { str += memberData.type.compose() + " "; } else { - str += constructStandardArray(memberData.type.compose(), memberData.arraySizes) + " const& "; + str += constructStandardArray( memberData.type.compose(), memberData.arraySizes ) + " const& "; } str += memberData.name + "_ = "; - auto enumIt = m_enums.find(memberData.type.type); - if (enumIt != m_enums.end() && memberData.type.postfix.empty()) + auto enumIt = m_enums.find( memberData.type.type ); + if ( enumIt != m_enums.end() && memberData.type.postfix.empty() ) { - appendEnumInitializer(str, memberData.type, memberData.arraySizes, enumIt->second.values, true); + appendEnumInitializer( str, memberData.type, memberData.arraySizes, enumIt->second.values, true ); } else { @@ -2913,7 +3592,7 @@ bool VulkanHppGenerator::appendStructConstructorArgument(std::string & str, bool return listedArgument; } -void VulkanHppGenerator::appendStructCopyConstructors(std::string & str, std::string const& name) const +void VulkanHppGenerator::appendStructCopyConstructors( std::string & str, std::string const & name ) const { static const std::string templateString = R"( ${name}( Vk${name} const & rhs ) VULKAN_HPP_NOEXCEPT @@ -2928,42 +3607,50 @@ void VulkanHppGenerator::appendStructCopyConstructors(std::string & str, std::st } )"; - str += replaceWithMap(templateString, { { "name", name } }); + str += replaceWithMap( templateString, { { "name", name } } ); } -void VulkanHppGenerator::appendStructMembers(std::string & str, std::pair const& structData, std::string const& prefix) const +void VulkanHppGenerator::appendStructMembers( std::string & str, + std::pair const & structData, + std::string const & prefix ) const { - for (auto const& member : structData.second.members) + for ( auto const & member : structData.second.members ) { str += prefix; - if ((member.name == "sType") && (nonConstSTypeStructs.find(structData.first) == nonConstSTypeStructs.end())) // special handling for sType and some nasty little structs that don't get a const sType + if ( ( member.name == "sType" ) && + ( nonConstSTypeStructs.find( structData.first ) == + nonConstSTypeStructs + .end() ) ) // special handling for sType and some nasty little structs that don't get a const sType { str += "const "; } - if (!member.bitCount.empty() && beginsWith(member.type.type, "Vk")) + if ( !member.bitCount.empty() && beginsWith( member.type.type, "Vk" ) ) { - assert(member.type.prefix.empty() && member.type.postfix.empty()); // never encounterd a different case + assert( member.type.prefix.empty() && member.type.postfix.empty() ); // never encounterd a different case str += member.type.type; } - else if (member.arraySizes.empty()) + else if ( member.arraySizes.empty() ) { str += member.type.compose(); } else { - assert(member.type.prefix.empty() && member.type.postfix.empty()); - str += constructStandardArrayWrapper(member.type.compose(), member.arraySizes); + assert( member.type.prefix.empty() && member.type.postfix.empty() ); + str += constructStandardArrayWrapper( member.type.compose(), member.arraySizes ); } str += " " + member.name; - if (member.name == "sType") // special handling for sType + if ( member.name == "sType" ) // special handling for sType { - auto enumIt = m_enums.find("VkStructureType"); - assert(enumIt != m_enums.end()); - if (!member.values.empty()) + auto enumIt = m_enums.find( "VkStructureType" ); + assert( enumIt != m_enums.end() ); + if ( !member.values.empty() ) { - assert(beginsWith(member.values, 
"VK_STRUCTURE_TYPE")); - auto nameIt = std::find_if(enumIt->second.values.begin(), enumIt->second.values.end(), [&member](EnumValueData const& evd) { return member.values == evd.vulkanValue; }); - assert(nameIt != enumIt->second.values.end()); + assert( beginsWith( member.values, "VK_STRUCTURE_TYPE" ) ); + auto nameIt = + std::find_if( enumIt->second.values.begin(), + enumIt->second.values.end(), + [&member]( EnumValueData const & evd ) { return member.values == evd.vulkanValue; } ); + assert( nameIt != enumIt->second.values.end() ); str += " = StructureType::" + nameIt->vkValue; } else @@ -2975,18 +3662,19 @@ void VulkanHppGenerator::appendStructMembers(std::string & str, std::pairsecond.values, false); + appendEnumInitializer( str, member.type, member.arraySizes, enumIt->second.values, false ); } else { @@ -2998,18 +3686,18 @@ void VulkanHppGenerator::appendStructMembers(std::string & str, std::pair listedStructures; - for (auto const& structure : m_structures) + for ( auto const & structure : m_structures ) { - appendStruct(str, structure, listedStructures); + appendStruct( str, structure, listedStructures ); } } void VulkanHppGenerator::appendStructSetter(std::string & str, std::string const& structureName, bool isUnion, MemberData const& memberData) const { - if (memberData.type.type != "VkStructureType") // filter out StructureType, which is supposed to be immutable ! + if ( memberData.type.type != "VkStructureType" ) // filter out StructureType, which is supposed to be immutable ! { static const std::string templateString = R"( ${structureName} & set${MemberName}( ${memberType} ${reference}${memberName}_ ) VULKAN_HPP_NOEXCEPT @@ -3021,7 +3709,7 @@ void VulkanHppGenerator::appendStructSetter(std::string & str, std::string const std::string memberType = memberData.arraySizes.empty() ? 
memberData.type.compose() : constructStandardArray(memberData.type.compose(), memberData.arraySizes); std::string assignment; - if (!memberData.bitCount.empty() && beginsWith(memberData.type.type, "Vk")) + if ( !memberData.bitCount.empty() && beginsWith( memberData.type.type, "Vk" ) ) { assignment = memberData.name + " = " + "*reinterpret_cast<" + memberData.type.type + "*>(&" + memberData.name + "_)"; } @@ -3046,57 +3734,64 @@ void VulkanHppGenerator::appendStructSetter(std::string & str, std::string const } } -void VulkanHppGenerator::appendStructSubConstructor(std::string &str, std::pair const& structData, std::string const& prefix) const +void VulkanHppGenerator::appendStructSubConstructor( std::string & str, + std::pair const & structData, + std::string const & prefix ) const { - if (!structData.second.subStruct.empty()) + if ( !structData.second.subStruct.empty() ) { - auto const& subStruct = m_structures.find(structData.second.subStruct); - assert(subStruct != m_structures.end()); + auto const & subStruct = m_structures.find( structData.second.subStruct ); + assert( subStruct != m_structures.end() ); - std::string subStructArgumentName = startLowerCase(stripPrefix(subStruct->first, "Vk")); - std::string ctorOpening = prefix + "explicit " + stripPrefix(structData.first, "Vk") + "( "; - std::string indentation = std::string(ctorOpening.size(), ' '); + std::string subStructArgumentName = startLowerCase( stripPrefix( subStruct->first, "Vk" ) ); + std::string ctorOpening = prefix + "explicit " + stripPrefix( structData.first, "Vk" ) + "( "; + std::string indentation = std::string( ctorOpening.size(), ' ' ); std::string subCopies; - bool firstArgument = true; - for (size_t i = 0; i < subStruct->second.members.size(); i++) + bool firstArgument = true; + for ( size_t i = 0; i < subStruct->second.members.size(); i++ ) { - assert(structData.second.members[i].arraySizes.empty()); - subCopies += prefix + " " + (firstArgument ? ":" : ",") + " " + structData.second.members[i].name + "( " + subStructArgumentName + "." + subStruct->second.members[i].name + " )\n"; + assert( structData.second.members[i].arraySizes.empty() ); + subCopies += prefix + " " + ( firstArgument ? ":" : "," ) + " " + structData.second.members[i].name + "( " + + subStructArgumentName + "." 
+ subStruct->second.members[i].name + " )\n"; firstArgument = false; } std::string subArguments; - bool listedArgument = true; - for (size_t i = subStruct->second.members.size(); i < structData.second.members.size(); i++) + bool listedArgument = true; + for ( size_t i = subStruct->second.members.size(); i < structData.second.members.size(); i++ ) { - listedArgument = appendStructConstructorArgument(subArguments, listedArgument, indentation, structData.second.members[i]); + listedArgument = + appendStructConstructorArgument( subArguments, listedArgument, indentation, structData.second.members[i] ); - assert(structData.second.members[i].arraySizes.empty()); - subCopies += prefix + " , " + structData.second.members[i].name + "( " + structData.second.members[i].name + "_ )\n"; + assert( structData.second.members[i].arraySizes.empty() ); + subCopies += + prefix + " , " + structData.second.members[i].name + "( " + structData.second.members[i].name + "_ )\n"; } - str += "\n" - " explicit " + stripPrefix(structData.first, "Vk") + "( " + stripPrefix(subStruct->first, "Vk") + " const& " + subStructArgumentName + subArguments + " )\n" - + subCopies + - " {}\n"; + str += + "\n" + " explicit " + + stripPrefix( structData.first, "Vk" ) + "( " + stripPrefix( subStruct->first, "Vk" ) + " const& " + + subStructArgumentName + subArguments + " )\n" + subCopies + " {}\n"; } } -void VulkanHppGenerator::appendStructure(std::string & str, std::pair const& structure) const +void VulkanHppGenerator::appendStructure( std::string & str, + std::pair const & structure ) const { str += "\n"; - appendPlatformEnter(str, !structure.second.aliases.empty(), structure.second.platform); + appendPlatformEnter( str, !structure.second.aliases.empty(), structure.second.platform ); std::string constructorAndSetters; - appendStructConstructor(constructorAndSetters, structure, " "); - appendStructSubConstructor(constructorAndSetters, structure, " "); - appendStructAssignmentOperator(constructorAndSetters, structure, " "); - appendStructCopyConstructors(constructorAndSetters, stripPrefix(structure.first, "Vk")); - if (!structure.second.returnedOnly) + appendStructConstructor( constructorAndSetters, structure, " " ); + appendStructSubConstructor( constructorAndSetters, structure, " " ); + appendStructAssignmentOperator( constructorAndSetters, structure, " " ); + appendStructCopyConstructors( constructorAndSetters, stripPrefix( structure.first, "Vk" ) ); + if ( !structure.second.returnedOnly ) { // only structs that are not returnedOnly get setters! - for (auto const& member : structure.second.members) + for ( auto const & member : structure.second.members ) { appendStructSetter(constructorAndSetters, stripPrefix(structure.first, "Vk"), false, member); } @@ -3105,14 +3800,14 @@ void VulkanHppGenerator::appendStructure(std::string & str, std::pair::value, "struct wrapper is not a standard layout!" 
); )"; - str += replaceWithMap(structureTemplate, - { - { "name", stripPrefix(structure.first, "Vk") }, - { "constructorAndSetters", constructorAndSetters }, - { "vkName", structure.first }, - { "compareOperators", compareOperators }, - { "members", members } - }); - appendPlatformLeave(str, !structure.second.aliases.empty(), structure.second.platform); + str += replaceWithMap( structureTemplate, + { { "name", stripPrefix( structure.first, "Vk" ) }, + { "constructorAndSetters", constructorAndSetters }, + { "vkName", structure.first }, + { "compareOperators", compareOperators }, + { "members", members } } ); + appendPlatformLeave( str, !structure.second.aliases.empty(), structure.second.platform ); } -void VulkanHppGenerator::appendStructureChainValidation(std::string & str) +void VulkanHppGenerator::appendStructureChainValidation( std::string & str ) { // append all template functions for the structure pointer chain validation - for (auto const& structure : m_structures) + for ( auto const & structure : m_structures ) { - if (!structure.second.structExtends.empty()) + if ( !structure.second.structExtends.empty() ) { - appendPlatformEnter(str, !structure.second.aliases.empty(), structure.second.platform); + appendPlatformEnter( str, !structure.second.aliases.empty(), structure.second.platform ); // append out allowed structure chains - for (auto extendName : structure.second.structExtends) + for ( auto extendName : structure.second.structExtends ) { - std::map::const_iterator itExtend = m_structures.find(extendName); - if (itExtend == m_structures.end()) + std::map::const_iterator itExtend = m_structures.find( extendName ); + if ( itExtend == m_structures.end() ) { // look if the extendName acutally is an alias of some other structure - itExtend = std::find_if(m_structures.begin(), m_structures.end(), [extendName](auto const& sd) { return sd.second.aliases.find(extendName) != sd.second.aliases.end(); }); + itExtend = std::find_if( m_structures.begin(), m_structures.end(), [extendName]( auto const & sd ) { + return sd.second.aliases.find( extendName ) != sd.second.aliases.end(); + } ); } - if (itExtend == m_structures.end()) + if ( itExtend == m_structures.end() ) { std::string errorString; errorString = "<" + extendName + "> does not specify a struct in structextends field."; // check if symbol name is an alias to a struct - auto itAlias = std::find_if(m_structures.begin(), m_structures.end(), [&extendName](std::pair const &it) -> bool {return std::find(it.second.aliases.begin(), it.second.aliases.end(), extendName) != it.second.aliases.end(); }); - if (itAlias != m_structures.end()) + auto itAlias = + std::find_if( m_structures.begin(), + m_structures.end(), + [&extendName]( std::pair const & it ) -> bool { + return std::find( it.second.aliases.begin(), it.second.aliases.end(), extendName ) != + it.second.aliases.end(); + } ); + if ( itAlias != m_structures.end() ) { errorString += " The symbol is an alias and maps to <" + itAlias->first + ">."; } - check(false, structure.second.xmlLine, errorString); + check( false, structure.second.xmlLine, errorString ); } - if (structure.second.platform != itExtend->second.platform) + if ( structure.second.platform != itExtend->second.platform ) { - appendPlatformEnter(str, !itExtend->second.aliases.empty(), itExtend->second.platform); + appendPlatformEnter( str, !itExtend->second.aliases.empty(), itExtend->second.platform ); } - str += " template <> struct isStructureChainValid<" + stripPrefix(extendName, "Vk") + ", " + stripPrefix(structure.first, 
"Vk") + ">{ enum { value = true }; };\n"; + str += " template <> struct isStructureChainValid<" + stripPrefix( extendName, "Vk" ) + ", " + + stripPrefix( structure.first, "Vk" ) + ">{ enum { value = true }; };\n"; - if (structure.second.platform != itExtend->second.platform) + if ( structure.second.platform != itExtend->second.platform ) { - appendPlatformLeave(str, !itExtend->second.aliases.empty(), itExtend->second.platform); + appendPlatformLeave( str, !itExtend->second.aliases.empty(), itExtend->second.platform ); } } - appendPlatformLeave(str, !structure.second.aliases.empty(), structure.second.platform); + appendPlatformLeave( str, !structure.second.aliases.empty(), structure.second.platform ); } } } -void VulkanHppGenerator::appendThrowExceptions(std::string & str) const +void VulkanHppGenerator::appendThrowExceptions( std::string & str ) const { - auto enumData = m_enums.find("VkResult"); + auto enumData = m_enums.find( "VkResult" ); - str += "\n" + str += + "\n" " [[noreturn]] static void throwResultException( Result result, char const * message )\n" " {\n" " switch ( result )\n" " {\n"; - for (auto const& value : enumData->second.values) + for ( auto const & value : enumData->second.values ) { - if (beginsWith(value.vkValue, "eError")) + if ( beginsWith( value.vkValue, "eError" ) ) { - str += " case Result::" + value.vkValue + ": throw " + stripPrefix(value.vkValue, "eError") + "Error( message );\n"; + str += " case Result::" + value.vkValue + ": throw " + stripPrefix( value.vkValue, "eError" ) + + "Error( message );\n"; } } - str += " default: throw SystemError( make_error_code( result ) );\n" + str += + " default: throw SystemError( make_error_code( result ) );\n" " }\n" " }\n"; } -void VulkanHppGenerator::appendUnion(std::string & str, std::pair const& structure) const +void VulkanHppGenerator::appendUnion( std::string & str, std::pair const & structure ) const { str += "\n"; - appendPlatformEnter(str, !structure.second.aliases.empty(), structure.second.platform); - std::string unionName = stripPrefix(structure.first, "Vk"); - str += - " union " + unionName + "\n" - " {\n" - " " + unionName + "( VULKAN_HPP_NAMESPACE::" + unionName + " const& rhs ) VULKAN_HPP_NOEXCEPT\n" - " {\n" - " memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::" + unionName + " ) );\n" - " }\n"; + appendPlatformEnter( str, !structure.second.aliases.empty(), structure.second.platform ); + std::string unionName = stripPrefix( structure.first, "Vk" ); + str += " union " + unionName + + "\n" + " {\n" + " " + + unionName + "( VULKAN_HPP_NAMESPACE::" + unionName + + " const& rhs ) VULKAN_HPP_NOEXCEPT\n" + " {\n" + " memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::" + + unionName + + " ) );\n" + " }\n"; bool firstMember = true; - for (auto const& member : structure.second.members) + for ( auto const & member : structure.second.members ) { - // VkBool32 is aliased to uint32_t. Don't create a VkBool32 constructor if the union also contains a uint32_t constructor. - auto compareBool32Alias = [](MemberData const& member) { return member.type.type == std::string("uint32_t"); }; - if (member.type.type == "VkBool32") { - if (std::find_if(structure.second.members.begin(), structure.second.members.end(), compareBool32Alias) != structure.second.members.end()) + // VkBool32 is aliased to uint32_t. Don't create a VkBool32 constructor if the union also contains a uint32_t + // constructor. 
+ auto compareBool32Alias = []( MemberData const & member ) { + return member.type.type == std::string( "uint32_t" ); + }; + if ( member.type.type == "VkBool32" ) + { + if ( std::find_if( structure.second.members.begin(), structure.second.members.end(), compareBool32Alias ) != + structure.second.members.end() ) { continue; } @@ -3259,7 +3973,7 @@ void VulkanHppGenerator::appendUnion(std::string & str, std::pair const& childrenTypes) const +void VulkanHppGenerator::appendUniqueTypes( std::string & str, + std::string const & parentType, + std::set const & childrenTypes ) const { - str += "\n" + str += + "\n" "#ifndef VULKAN_HPP_NO_SMART_HANDLE\n"; - if (!parentType.empty()) + if ( !parentType.empty() ) { - str += " class " + stripPrefix(parentType, "Vk") + ";\n"; + str += " class " + stripPrefix( parentType, "Vk" ) + ";\n"; } - for (auto const& childType : childrenTypes) + for ( auto const & childType : childrenTypes ) { - auto handleIt = m_handles.find(childType); - assert(handleIt != m_handles.end()); + auto handleIt = m_handles.find( childType ); + assert( handleIt != m_handles.end() ); - std::string type = stripPrefix(childType, "Vk"); - std::string deleterType = handleIt->second.deletePool.empty() ? "Object" : "Pool"; - std::string deleterAction = (handleIt->second.deleteCommand.substr(2, 4) == "Free") ? "Free" : "Destroy"; - std::string deleterParent = parentType.empty() ? "NoParent" : stripPrefix(parentType, "Vk"); - std::string deleterPool = handleIt->second.deletePool.empty() ? "" : ", " + stripPrefix(handleIt->second.deletePool, "Vk"); - appendPlatformEnter(str, !handleIt->second.alias.empty(), handleIt->second.platform); - str += " template class UniqueHandleTraits<" + type + ", Dispatch> { public: using deleter = " + deleterType + deleterAction + "<" + deleterParent + deleterPool + ", Dispatch>; };\n" - " using Unique" + type + " = UniqueHandle<" + type + ", VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;\n"; + std::string type = stripPrefix( childType, "Vk" ); + std::string deleterType = handleIt->second.deletePool.empty() ? "Object" : "Pool"; + std::string deleterAction = ( handleIt->second.deleteCommand.substr( 2, 4 ) == "Free" ) ? "Free" : "Destroy"; + std::string deleterParent = parentType.empty() ? "NoParent" : stripPrefix( parentType, "Vk" ); + std::string deleterPool = + handleIt->second.deletePool.empty() ? 
"" : ", " + stripPrefix( handleIt->second.deletePool, "Vk" ); + appendPlatformEnter( str, !handleIt->second.alias.empty(), handleIt->second.platform ); + str += " template class UniqueHandleTraits<" + type + + ", Dispatch> { public: using deleter = " + deleterType + deleterAction + "<" + deleterParent + deleterPool + + ", Dispatch>; };\n" + " using Unique" + + type + " = UniqueHandle<" + type + ", VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;\n"; - if (!handleIt->second.alias.empty()) + if ( !handleIt->second.alias.empty() ) { - str += " using Unique" + stripPrefix(handleIt->second.alias, "Vk") + " = UniqueHandle<" + type + ", VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;\n"; + str += " using Unique" + stripPrefix( handleIt->second.alias, "Vk" ) + " = UniqueHandle<" + type + + ", VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;\n"; } - appendPlatformLeave(str, !handleIt->second.alias.empty(), handleIt->second.platform); + appendPlatformLeave( str, !handleIt->second.alias.empty(), handleIt->second.platform ); } str += "#endif /*VULKAN_HPP_NO_SMART_HANDLE*/\n"; } -void VulkanHppGenerator::EnumData::addEnumValue(int line, std::string const &valueName, bool bitmask, bool bitpos, std::string const& prefix, std::string const& postfix, std::string const& tag) +void VulkanHppGenerator::EnumData::addEnumValue( int line, + std::string const & valueName, + bool bitmask, + bool bitpos, + std::string const & prefix, + std::string const & postfix, + std::string const & tag ) { - std::string translatedName = createEnumValueName(valueName, prefix, postfix, bitmask, tag); + std::string translatedName = createEnumValueName( valueName, prefix, postfix, bitmask, tag ); - auto it = std::find_if(values.begin(), values.end(), [&translatedName](EnumValueData const& evd) { return evd.vkValue == translatedName; }); - if (it == values.end()) + auto it = std::find_if( values.begin(), values.end(), [&translatedName]( EnumValueData const & evd ) { + return evd.vkValue == translatedName; + } ); + if ( it == values.end() ) { - values.push_back(EnumValueData(valueName, translatedName, bitpos)); + values.push_back( EnumValueData( valueName, translatedName, bitpos ) ); } else { - check(it->vulkanValue == valueName, line, "enum value <" + valueName + "> maps to same C++-name as <" + it->vulkanValue + ">"); + check( it->vulkanValue == valueName, + line, + "enum value <" + valueName + "> maps to same C++-name as <" + it->vulkanValue + ">" ); } } void VulkanHppGenerator::checkCorrectness() { - check(!m_vulkanLicenseHeader.empty(), -1, "missing license header"); - for (auto const& baseType : m_baseTypes) + check( !m_vulkanLicenseHeader.empty(), -1, "missing license header" ); + for ( auto const & baseType : m_baseTypes ) { - check(m_types.find(baseType.second.type) != m_types.end(), baseType.second.xmlLine, "basetype type <" + baseType.second.type + "> not specified"); + check( m_types.find( baseType.second.type ) != m_types.end(), + baseType.second.xmlLine, + "basetype type <" + baseType.second.type + "> not specified" ); } - for (auto const& bitmask : m_bitmasks) + for ( auto const & bitmask : m_bitmasks ) { - if (!bitmask.second.requirements.empty()) + if ( !bitmask.second.requirements.empty() ) { - check(m_enums.find(bitmask.second.requirements) != m_enums.end(), bitmask.second.xmlLine, "bitmask requires unknown <" + bitmask.second.requirements + ">"); + check( m_enums.find( bitmask.second.requirements ) != m_enums.end(), + bitmask.second.xmlLine, + "bitmask requires unknown <" + bitmask.second.requirements + ">" ); } } - for (auto const& extension : 
m_extensions) + for ( auto const & extension : m_extensions ) { - if (!extension.second.deprecatedBy.empty()) + if ( !extension.second.deprecatedBy.empty() ) { - check((m_extensions.find(extension.second.deprecatedBy) != m_extensions.end()) || (m_features.find(extension.second.deprecatedBy) != m_features.end()) - , extension.second.xmlLine, "extension deprecated by to unknown extension/version <" + extension.second.promotedTo + ">"); + check( ( m_extensions.find( extension.second.deprecatedBy ) != m_extensions.end() ) || + ( m_features.find( extension.second.deprecatedBy ) != m_features.end() ), + extension.second.xmlLine, + "extension deprecated by to unknown extension/version <" + extension.second.promotedTo + ">" ); } - if (!extension.second.obsoletedBy.empty()) + if ( !extension.second.obsoletedBy.empty() ) { - check((m_extensions.find(extension.second.obsoletedBy) != m_extensions.end()) || (m_features.find(extension.second.obsoletedBy) != m_features.end()) - , extension.second.xmlLine, "extension obsoleted by unknown extension/version <" + extension.second.promotedTo + ">"); + check( ( m_extensions.find( extension.second.obsoletedBy ) != m_extensions.end() ) || + ( m_features.find( extension.second.obsoletedBy ) != m_features.end() ), + extension.second.xmlLine, + "extension obsoleted by unknown extension/version <" + extension.second.promotedTo + ">" ); } - if (!extension.second.promotedTo.empty()) + if ( !extension.second.promotedTo.empty() ) { - check((m_extensions.find(extension.second.promotedTo) != m_extensions.end()) || (m_features.find(extension.second.promotedTo) != m_features.end()) - , extension.second.xmlLine, "extension promoted to unknown extension/version <" + extension.second.promotedTo + ">"); + check( ( m_extensions.find( extension.second.promotedTo ) != m_extensions.end() ) || + ( m_features.find( extension.second.promotedTo ) != m_features.end() ), + extension.second.xmlLine, + "extension promoted to unknown extension/version <" + extension.second.promotedTo + ">" ); } - for (auto const& require : extension.second.requirements) + for ( auto const & require : extension.second.requirements ) { - check(m_extensions.find(require.first) != m_extensions.end(), require.second, "unknown extension requires <" + require.first + ">"); + check( m_extensions.find( require.first ) != m_extensions.end(), + require.second, + "unknown extension requires <" + require.first + ">" ); } } - for (auto const& funcPointer : m_funcPointers) + for ( auto const & funcPointer : m_funcPointers ) { - if (!funcPointer.second.requirements.empty()) + if ( !funcPointer.second.requirements.empty() ) { - check(m_types.find(funcPointer.second.requirements) != m_types.end(), funcPointer.second.xmlLine, "funcpointer requires unknown <" + funcPointer.second.requirements + ">"); + check( m_types.find( funcPointer.second.requirements ) != m_types.end(), + funcPointer.second.xmlLine, + "funcpointer requires unknown <" + funcPointer.second.requirements + ">" ); } } - auto structureTypeIt = m_enums.find("VkStructureType"); - assert(structureTypeIt != m_enums.end()); - for (auto const& structure : m_structures) + auto structureTypeIt = m_enums.find( "VkStructureType" ); + assert( structureTypeIt != m_enums.end() ); + for ( auto const & structure : m_structures ) { - for (auto const& extend : structure.second.structExtends) + for ( auto const & extend : structure.second.structExtends ) { - check(m_types.find(extend) != m_types.end(), structure.second.xmlLine, "struct extends unknown <" + extend + ">"); + check( 
+ m_types.find( extend ) != m_types.end(), structure.second.xmlLine, "struct extends unknown <" + extend + ">" ); } - for (auto const& member : structure.second.members) + for ( auto const & member : structure.second.members ) { - if (!member.values.empty()) + if ( !member.values.empty() ) { - assert(member.name == "sType"); - check(std::find_if(structureTypeIt->second.values.begin(), structureTypeIt->second.values.end(), [&member](auto const& evd) { return member.values == evd.vulkanValue; }) != structureTypeIt->second.values.end(), - member.xmlLine, "sType value <" + member.values + "> not listed for VkStructureType"); + assert( member.name == "sType" ); + check( std::find_if( structureTypeIt->second.values.begin(), + structureTypeIt->second.values.end(), + [&member]( auto const & evd ) { return member.values == evd.vulkanValue; } ) != + structureTypeIt->second.values.end(), + member.xmlLine, + "sType value <" + member.values + "> not listed for VkStructureType" ); } - check(m_types.find(member.type.type) != m_types.end(), member.xmlLine, "struct member uses unknown type <" + member.type.type + ">"); - if (!member.usedConstant.empty()) + check( m_types.find( member.type.type ) != m_types.end(), + member.xmlLine, + "struct member uses unknown type <" + member.type.type + ">" ); + if ( !member.usedConstant.empty() ) { - check(m_constants.find(member.usedConstant) != m_constants.end(), member.xmlLine, "struct member array size uses unknown constant <" + member.usedConstant + ">"); + check( m_constants.find( member.usedConstant ) != m_constants.end(), + member.xmlLine, + "struct member array size uses unknown constant <" + member.usedConstant + ">" ); } } } - auto resultIt = m_enums.find("VkResult"); - assert(resultIt != m_enums.end()); + auto resultIt = m_enums.find( "VkResult" ); + assert( resultIt != m_enums.end() ); std::set resultCodes; - for (auto rc : resultIt->second.values) + for ( auto rc : resultIt->second.values ) { - resultCodes.insert(rc.vulkanValue); + resultCodes.insert( rc.vulkanValue ); } - for (auto rc : resultIt->second.aliases) + for ( auto rc : resultIt->second.aliases ) { - resultCodes.insert(rc.first); + resultCodes.insert( rc.first ); } - for (auto const& handle : m_handles) + for ( auto const & handle : m_handles ) { - for (auto const& parent : handle.second.parents) + for ( auto const & parent : handle.second.parents ) { - check(m_handles.find(parent) != m_handles.end(), handle.second.xmlLine, "handle with unknown parent <" + parent + ">"); + check( m_handles.find( parent ) != m_handles.end(), + handle.second.xmlLine, + "handle with unknown parent <" + parent + ">" ); } - for (auto const& command : handle.second.commands) + for ( auto const & command : handle.second.commands ) { - for (auto const& ec : command.second.errorCodes) + for ( auto const & ec : command.second.errorCodes ) { - check(resultCodes.find(ec) != resultCodes.end(), command.second.xmlLine, "command uses unknown error code <" + ec + ">"); + check( resultCodes.find( ec ) != resultCodes.end(), + command.second.xmlLine, + "command uses unknown error code <" + ec + ">" ); } - for (auto const& sc : command.second.successCodes) + for ( auto const & sc : command.second.successCodes ) { - check(resultCodes.find(sc) != resultCodes.end(), command.second.xmlLine, "command uses unknown success code <" + sc + ">"); + check( resultCodes.find( sc ) != resultCodes.end(), + command.second.xmlLine, + "command uses unknown success code <" + sc + ">" ); } // check that functions returning a VkResult specify 
successcodes - check((command.second.returnType != "VkResult") || !command.second.successCodes.empty(), command.second.xmlLine, "missing successcodes on command <" + command.first + "> returning VkResult!"); + check( ( command.second.returnType != "VkResult" ) || !command.second.successCodes.empty(), + command.second.xmlLine, + "missing successcodes on command <" + command.first + "> returning VkResult!" ); - for (auto const& p : command.second.params) + for ( auto const & p : command.second.params ) { - check(m_types.find(p.type.type) != m_types.end(), p.xmlLine, "comand uses parameter of unknown type <" + p.type.type + ">"); + check( m_types.find( p.type.type ) != m_types.end(), + p.xmlLine, + "comand uses parameter of unknown type <" + p.type.type + ">" ); } - check(m_types.find(command.second.returnType) != m_types.end(), command.second.xmlLine, "command uses unknown return type <" + command.second.returnType + ">"); + check( m_types.find( command.second.returnType ) != m_types.end(), + command.second.xmlLine, + "command uses unknown return type <" + command.second.returnType + ">" ); } } } -bool VulkanHppGenerator::checkLenAttribute(std::string const& len, std::vector const& params) +bool VulkanHppGenerator::checkLenAttribute( std::string const & len, std::vector const & params ) { // simple: "null-terminated" or previously encountered parameter - if ((len == "null-terminated") || (std::find_if(params.begin(), params.end(), [&len](ParamData const& pd) { return pd.name == len; }) != params.end())) + if ( ( len == "null-terminated" ) || ( std::find_if( params.begin(), params.end(), [&len]( ParamData const & pd ) { + return pd.name == len; + } ) != params.end() ) ) { return true; } // check if len specifies a member of a struct - std::vector lenParts = tokenize(len, "->"); - if (lenParts.size() == 1) + std::vector lenParts = tokenize( len, "->" ); + if ( lenParts.size() == 1 ) { // older versions of vk.xml used the notation parameter::member - lenParts = tokenize(len, "::"); + lenParts = tokenize( len, "::" ); } - if (lenParts.size() == 2) + if ( lenParts.size() == 2 ) { - auto paramIt = std::find_if(params.begin(), params.end(), [&l = lenParts[0]](ParamData const& pd){ return pd.name == l; }); - if (paramIt != params.end()) + auto paramIt = + std::find_if( params.begin(), params.end(), [&l = lenParts[0]]( ParamData const & pd ) { return pd.name == l; } ); + if ( paramIt != params.end() ) { - auto structureIt = m_structures.find(paramIt->type.type); - if ((structureIt != m_structures.end()) && (std::find_if(structureIt->second.members.begin(), structureIt->second.members.end(), [&n = lenParts[1]](MemberData const& md){ return md.name == n; }) != structureIt->second.members.end())) + auto structureIt = m_structures.find( paramIt->type.type ); + if ( ( structureIt != m_structures.end() ) && ( std::find_if( structureIt->second.members.begin(), + structureIt->second.members.end(), + [&n = lenParts[1]]( MemberData const & md ) { + return md.name == n; + } ) != structureIt->second.members.end() ) ) { return true; } @@ -3494,69 +4275,78 @@ bool VulkanHppGenerator::checkLenAttribute(std::string const& len, std::vector

second.members.begin(); memberIt != structureIt->second.members.end() && !found; ++memberIt) + for ( auto memberIt = structureIt->second.members.begin(); memberIt != structureIt->second.members.end() && !found; + ++memberIt ) { - found = !memberIt->arraySizes.empty() || containsArray(memberIt->type.type); + found = !memberIt->arraySizes.empty() || containsArray( memberIt->type.type ); } } return found; } -bool VulkanHppGenerator::containsUnion(std::string const& type) const +bool VulkanHppGenerator::containsUnion( std::string const & type ) const { // a simple recursive check if a type is or contains a union - auto structureIt = m_structures.find(type); - bool found = (structureIt != m_structures.end()); - if (found) + auto structureIt = m_structures.find( type ); + bool found = ( structureIt != m_structures.end() ); + if ( found ) { found = structureIt->second.isUnion; - for (auto memberIt = structureIt->second.members.begin(); memberIt != structureIt->second.members.end() && !found; ++memberIt) + for ( auto memberIt = structureIt->second.members.begin(); memberIt != structureIt->second.members.end() && !found; + ++memberIt ) { - found = memberIt->type.prefix.empty() && memberIt->type.postfix.empty() && containsUnion(memberIt->type.type); + found = memberIt->type.prefix.empty() && memberIt->type.postfix.empty() && containsUnion( memberIt->type.type ); } } return found; } -std::string VulkanHppGenerator::determineEnhancedReturnType(CommandData const& commandData, size_t returnParamIndex, std::map const& vectorParamIndices, bool isStructureChain) const +std::string VulkanHppGenerator::determineEnhancedReturnType( CommandData const & commandData, + size_t returnParamIndex, + std::map const & vectorParamIndices, + bool isStructureChain ) const { - assert((returnParamIndex == INVALID_INDEX) || (returnParamIndex < commandData.params.size())); - for (auto vpi : vectorParamIndices) + assert( ( returnParamIndex == INVALID_INDEX ) || ( returnParamIndex < commandData.params.size() ) ); + for ( auto vpi : vectorParamIndices ) { - assert((vpi.first != vpi.second) && (vpi.first < commandData.params.size()) && ((vpi.second == INVALID_INDEX) || (vpi.second < commandData.params.size()))); + assert( ( vpi.first != vpi.second ) && ( vpi.first < commandData.params.size() ) && + ( ( vpi.second == INVALID_INDEX ) || ( vpi.second < commandData.params.size() ) ) ); } std::string enhancedReturnType; - if (returnParamIndex != INVALID_INDEX) + if ( returnParamIndex != INVALID_INDEX ) { // if there is a return parameter, we think returnType is always "void" or "VkResult" // -> we can return that parameter - assert((commandData.returnType == "void") || (commandData.returnType == "VkResult")); - assert(commandData.successCodes.empty() || (commandData.successCodes[0] == "VK_SUCCESS")); - if (vectorParamIndices.find(returnParamIndex) != vectorParamIndices.end()) + assert( ( commandData.returnType == "void" ) || ( commandData.returnType == "VkResult" ) ); + assert( commandData.successCodes.empty() || ( commandData.successCodes[0] == "VK_SUCCESS" ) ); + if ( vectorParamIndices.find( returnParamIndex ) != vectorParamIndices.end() ) { - enhancedReturnType = (commandData.params[returnParamIndex].type.type == "void") - ? "std::vector" // the return parameter is a vector-type parameter - : isStructureChain - ? 
"std::vector" // for structureChain returns, it's just a vector of StrutureChains - : "std::vector<" + stripPrefix(commandData.params[returnParamIndex].type.type, "Vk") + ",Allocator>"; // for the other parameters, we use a vector of the pure type + enhancedReturnType = + ( commandData.params[returnParamIndex].type.type == "void" ) + ? "std::vector" // the return parameter is a vector-type parameter + : isStructureChain ? "std::vector" // for structureChain returns, it's just + // a vector of StrutureChains + : "std::vector<" + stripPrefix( commandData.params[returnParamIndex].type.type, "Vk" ) + + ",Allocator>"; // for the other parameters, we use a vector of the pure type } else { // it's a simple parameter -> get the type and just remove the trailing '*' (originally, it's a pointer) - assert(commandData.params[returnParamIndex].type.postfix.back() == '*'); - assert((commandData.params[returnParamIndex].type.prefix.find("const") == std::string::npos) && (commandData.params[returnParamIndex].type.postfix.find("const") == std::string::npos)); - enhancedReturnType = stripPostfix(commandData.params[returnParamIndex].type.compose(), "*"); + assert( commandData.params[returnParamIndex].type.postfix.back() == '*' ); + assert( ( commandData.params[returnParamIndex].type.prefix.find( "const" ) == std::string::npos ) && + ( commandData.params[returnParamIndex].type.postfix.find( "const" ) == std::string::npos ) ); + enhancedReturnType = stripPostfix( commandData.params[returnParamIndex].type.compose(), "*" ); } } - else if ((commandData.returnType == "VkResult") && (commandData.successCodes.size() == 1)) + else if ( ( commandData.returnType == "VkResult" ) && ( commandData.successCodes.size() == 1 ) ) { // an original return of type "Result" with just one successCode is changed to void, errors throw an exception enhancedReturnType = "void"; @@ -3564,39 +4354,51 @@ std::string VulkanHppGenerator::determineEnhancedReturnType(CommandData const& c else { // the return type just stays the original return type - enhancedReturnType = stripPrefix(commandData.returnType, "Vk"); + enhancedReturnType = stripPrefix( commandData.returnType, "Vk" ); } return enhancedReturnType; } -size_t VulkanHppGenerator::determineReturnParamIndex(CommandData const& commandData, std::map const& vectorParamIndices, bool twoStep) const +size_t VulkanHppGenerator::determineReturnParamIndex( CommandData const & commandData, + std::map const & vectorParamIndices, + bool twoStep ) const { - for (auto vpi : vectorParamIndices) + for ( auto vpi : vectorParamIndices ) { - assert((vpi.first != vpi.second) && (vpi.first < commandData.params.size()) && ((vpi.second == INVALID_INDEX) || (vpi.second < commandData.params.size()))); + assert( ( vpi.first != vpi.second ) && ( vpi.first < commandData.params.size() ) && + ( ( vpi.second == INVALID_INDEX ) || ( vpi.second < commandData.params.size() ) ) ); } size_t returnParamIndex = INVALID_INDEX; // for return types of type VkResult or void, we can determine a parameter to return - if ((commandData.returnType == "VkResult") || (commandData.returnType == "void")) + if ( ( commandData.returnType == "VkResult" ) || ( commandData.returnType == "void" ) ) { - for (size_t i = 0; i < commandData.params.size(); i++) + for ( size_t i = 0; i < commandData.params.size(); i++ ) { - if ((commandData.params[i].type.postfix.find('*') != std::string::npos) - && ((commandData.params[i].type.type != "void") || twoStep || (commandData.params[i].type.postfix.find("**") != std::string::npos)) - && 
(commandData.params[i].type.prefix.find("const") == std::string::npos) - && std::find_if(vectorParamIndices.begin(), vectorParamIndices.end(), [i](std::pair const& vpi) { return vpi.second == i; }) == vectorParamIndices.end()) + if ( ( commandData.params[i].type.postfix.find( '*' ) != std::string::npos ) && + ( ( commandData.params[i].type.type != "void" ) || twoStep || + ( commandData.params[i].type.postfix.find( "**" ) != std::string::npos ) ) && + ( commandData.params[i].type.prefix.find( "const" ) == std::string::npos ) && + std::find_if( + vectorParamIndices.begin(), vectorParamIndices.end(), [i]( std::pair const & vpi ) { + return vpi.second == i; + } ) == vectorParamIndices.end() ) { // it's a non-const pointer and not a vector-size parameter - std::map::const_iterator vpit = vectorParamIndices.find(i); - if ((vpit == vectorParamIndices.end()) || twoStep || (vectorParamIndices.size() > 1) || (vpit->second == INVALID_INDEX) || (commandData.params[vpit->second].type.postfix.find('*') != std::string::npos)) + std::map::const_iterator vpit = vectorParamIndices.find( i ); + if ( ( vpit == vectorParamIndices.end() ) || twoStep || ( vectorParamIndices.size() > 1 ) || + ( vpit->second == INVALID_INDEX ) || + ( commandData.params[vpit->second].type.postfix.find( '*' ) != std::string::npos ) ) { - // it's not a vector parameter, or a two-step process, or there is at least one more vector parameter, or the size argument of this vector parameter is not an argument, or the size argument of this vector parameter is provided by a pointer + // it's not a vector parameter, or a two-step process, or there is at least one more vector parameter, or the + // size argument of this vector parameter is not an argument, or the size argument of this vector parameter is + // provided by a pointer // -> look for another non-cost pointer argument - auto paramIt = std::find_if(commandData.params.begin() + i + 1, commandData.params.end(), [](ParamData const& pd) - { - return (pd.type.postfix.find('*') != std::string::npos) && (pd.type.postfix.find("const") == std::string::npos); - }); + auto paramIt = + std::find_if( commandData.params.begin() + i + 1, commandData.params.end(), []( ParamData const & pd ) { + return ( pd.type.postfix.find( '*' ) != std::string::npos ) && + ( pd.type.postfix.find( "const" ) == std::string::npos ); + } ); // if there is another such argument, we can't decide which one to return -> return INVALID_INDEX // otherwise return the index of the selcted parameter returnParamIndex = paramIt != commandData.params.end() ? 
INVALID_INDEX : i; @@ -3607,18 +4409,20 @@ size_t VulkanHppGenerator::determineReturnParamIndex(CommandData const& commandD return returnParamIndex; } -std::string VulkanHppGenerator::determineSubStruct(std::pair const& structure) const +std::string VulkanHppGenerator::determineSubStruct( std::pair const & structure ) const { - for (auto const& s : m_structures) + for ( auto const & s : m_structures ) { - if ((s.first != structure.first) && (s.second.members.size() < structure.second.members.size()) && (s.second.members[0].name != "sType")) + if ( ( s.first != structure.first ) && ( s.second.members.size() < structure.second.members.size() ) && + ( s.second.members[0].name != "sType" ) ) { bool equal = true; - for (size_t i = 0; i < s.second.members.size() && equal; i++) + for ( size_t i = 0; i < s.second.members.size() && equal; i++ ) { - equal = (s.second.members[i].type == structure.second.members[i].type) && (s.second.members[i].name == structure.second.members[i].name); + equal = ( s.second.members[i].type == structure.second.members[i].type ) && + ( s.second.members[i].name == structure.second.members[i].name ); } - if (equal) + if ( equal ) { return s.first; } @@ -3627,64 +4431,74 @@ std::string VulkanHppGenerator::determineSubStruct(std::pair const& params, std::map const& vectorParamIndices) const +size_t VulkanHppGenerator::determineTemplateParamIndex( std::vector const & params, + std::map const & vectorParamIndices ) const { size_t templateParamIndex = INVALID_INDEX; - for (size_t i = 0; i < params.size(); i++) + for ( size_t i = 0; i < params.size(); i++ ) { // any vector parameter on the pure type void is templatized in the enhanced API - if ((vectorParamIndices.find(i) != vectorParamIndices.end()) && (params[i].type.type == "void")) + if ( ( vectorParamIndices.find( i ) != vectorParamIndices.end() ) && ( params[i].type.type == "void" ) ) { -#if !defined(NDEBUG) - for (size_t j = i + 1; j < params.size(); j++) +#if !defined( NDEBUG ) + for ( size_t j = i + 1; j < params.size(); j++ ) { - assert((vectorParamIndices.find(j) == vectorParamIndices.end()) || (params[j].type.type != "void")); + assert( ( vectorParamIndices.find( j ) == vectorParamIndices.end() ) || ( params[j].type.type != "void" ) ); } #endif templateParamIndex = i; break; } } - assert((templateParamIndex == INVALID_INDEX) || (vectorParamIndices.find(templateParamIndex) != vectorParamIndices.end())); + assert( ( templateParamIndex == INVALID_INDEX ) || + ( vectorParamIndices.find( templateParamIndex ) != vectorParamIndices.end() ) ); return templateParamIndex; } -std::map VulkanHppGenerator::determineVectorParamIndices(std::vector const& params) const +std::map VulkanHppGenerator::determineVectorParamIndices( std::vector const & params ) const { std::map vectorParamIndices; // look for the parameters whose len equals the name of an other parameter - for (auto it = params.begin(); it != params.end(); ++it) + for ( auto it = params.begin(); it != params.end(); ++it ) { - if (!it->len.empty()) + if ( !it->len.empty() ) { - auto findLambda = [it](ParamData const& pd) { return pd.name == it->len; }; - auto findIt = std::find_if(params.begin(), it, findLambda); // look for a parameter named as the len of this parameter - assert((std::count_if(params.begin(), params.end(), findLambda) == 0) || (findIt < it)); // make sure, there is no other parameter like that + auto findLambda = [it]( ParamData const & pd ) { + return pd.name == it->len; + }; + auto findIt = + std::find_if( params.begin(), it, findLambda ); // look for 
a parameter named as the len of this parameter + assert( ( std::count_if( params.begin(), params.end(), findLambda ) == 0 ) || + ( findIt < it ) ); // make sure, there is no other parameter like that - // add this parameter as a vector parameter, using the len-name parameter as the second value (or INVALID_INDEX if there is nothing like that) - vectorParamIndices.insert(std::make_pair(std::distance(params.begin(), it), (findIt < it) ? std::distance(params.begin(), findIt) : INVALID_INDEX)); - assert((vectorParamIndices[std::distance(params.begin(), it)] != INVALID_INDEX) - || (it->len == "null-terminated") - || (it->len == "pAllocateInfo->descriptorSetCount") || (it->len == "pAllocateInfo::descriptorSetCount") - || (it->len == "pAllocateInfo->commandBufferCount") || (it->len == "pAllocateInfo::commandBufferCount")); + // add this parameter as a vector parameter, using the len-name parameter as the second value (or INVALID_INDEX if + // there is nothing like that) + vectorParamIndices.insert( + std::make_pair( std::distance( params.begin(), it ), + ( findIt < it ) ? std::distance( params.begin(), findIt ) : INVALID_INDEX ) ); + assert( ( vectorParamIndices[std::distance( params.begin(), it )] != INVALID_INDEX ) || + ( it->len == "null-terminated" ) || ( it->len == "pAllocateInfo->descriptorSetCount" ) || + ( it->len == "pAllocateInfo::descriptorSetCount" ) || + ( it->len == "pAllocateInfo->commandBufferCount" ) || + ( it->len == "pAllocateInfo::commandBufferCount" ) ); } } return vectorParamIndices; } -std::string const& VulkanHppGenerator::getTypesafeCheck() const +std::string const & VulkanHppGenerator::getTypesafeCheck() const { return m_typesafeCheck; } -std::string const& VulkanHppGenerator::getVersion() const +std::string const & VulkanHppGenerator::getVersion() const { return m_version; } -std::string const& VulkanHppGenerator::getVulkanLicenseHeader() const +std::string const & VulkanHppGenerator::getVulkanLicenseHeader() const { return m_vulkanLicenseHeader; } @@ -3702,80 +4516,94 @@ bool VulkanHppGenerator::holdsSType(std::string const& type) const bool VulkanHppGenerator::isTwoStepAlgorithm(std::vector const& params) const { - // we generate a two-step algorithm for functions returning a vector of stuff, where the length is specified as a pointer as well - // for those functions, the size can be queried first, and then used + // we generate a two-step algorithm for functions returning a vector of stuff, where the length is specified as a + // pointer as well for those functions, the size can be queried first, and then used bool isTwoStep = false; - for (auto paramIt = params.begin(); paramIt != params.end() && !isTwoStep; ++paramIt) + for ( auto paramIt = params.begin(); paramIt != params.end() && !isTwoStep; ++paramIt ) { - if (!paramIt->len.empty()) + if ( !paramIt->len.empty() ) { - auto lenIt = std::find_if(params.begin(), paramIt, [paramIt](ParamData const& pd) { return paramIt->len == pd.name; }); - if (lenIt != paramIt) + auto lenIt = + std::find_if( params.begin(), paramIt, [paramIt]( ParamData const & pd ) { return paramIt->len == pd.name; } ); + if ( lenIt != paramIt ) { - isTwoStep = (lenIt->type.postfix.find('*') != std::string::npos); + isTwoStep = ( lenIt->type.postfix.find( '*' ) != std::string::npos ); } } } return isTwoStep; } -void VulkanHppGenerator::linkCommandToHandle(int line, std::string const& name, CommandData const& commandData) +void VulkanHppGenerator::linkCommandToHandle( int line, std::string const & name, CommandData const & commandData ) { // 
first, find the handle named like the type of the first argument
-  // if there is no such handle, look for the unnamed "handle", that gathers all the functions not tied to a specific handle
-  check(!commandData.params.empty(), line, "command <" + name + "> with no params");
-  std::map<std::string, HandleData>::iterator handleIt = m_handles.find(commandData.params[0].type.type);
-  if (handleIt == m_handles.end())
+  // if there is no such handle, look for the unnamed "handle", that gathers all the functions not tied to a specific
+  // handle
+  check( !commandData.params.empty(), line, "command <" + name + "> with no params" );
+  std::map<std::string, HandleData>::iterator handleIt = m_handles.find( commandData.params[0].type.type );
+  if ( handleIt == m_handles.end() )
   {
-    handleIt = m_handles.find("");
+    handleIt = m_handles.find( "" );
   }
-  check(handleIt != m_handles.end(), line, "could not find a handle to hold command <" + name + ">");
+  check( handleIt != m_handles.end(), line, "could not find a handle to hold command <" + name + ">" );
 
   // put the command into the handle's list of commands
-  check(handleIt->second.commands.find(name) == handleIt->second.commands.end(), line, "command list of handle <" + handleIt->first + "> already holds a commnand <" + name + ">");
-  handleIt->second.commands.insert(std::make_pair(name, commandData));
+  check( handleIt->second.commands.find( name ) == handleIt->second.commands.end(),
+         line,
+         "command list of handle <" + handleIt->first + "> already holds a command <" + name + ">" );
+  handleIt->second.commands.insert( std::make_pair( name, commandData ) );
 
   // and store the handle in the command-to-handle map
-  check(m_commandToHandle.find(name) == m_commandToHandle.end(), line, "command to handle mapping already holds the command <" + name + ">");
+  check( m_commandToHandle.find( name ) == m_commandToHandle.end(),
+         line,
+         "command to handle mapping already holds the command <" + name + ">" );
   m_commandToHandle[name] = handleIt->first;
 }
 
-void VulkanHppGenerator::readBaseType(tinyxml2::XMLElement const* element, std::map<std::string, std::string> const& attributes)
+void VulkanHppGenerator::readBaseType( tinyxml2::XMLElement const * element,
+                                       std::map<std::string, std::string> const & attributes )
 {
   int line = element->GetLineNum();
-  checkAttributes(line, attributes, { { "category",{ "basetype" } } }, {});
+  checkAttributes( line, attributes, { { "category", { "basetype" } } }, {} );
 
   NameData nameData;
   TypeData typeData;
-  std::tie(nameData, typeData) = readNameAndType(element);
+  std::tie( nameData, typeData ) = readNameAndType( element );
 
-  check(beginsWith(nameData.name, "Vk"), line, "name <" + nameData.name + "> does not begin with <Vk>");
-  check(nameData.arraySizes.empty(), line, "name <" + nameData.name + "> with unsupported arraySizes");
-  check(nameData.bitCount.empty(), line, "name <" + nameData.name + "> with unsupported bitCount <" + nameData.bitCount + ">");
-  check(typeData.prefix == "typedef", line, "unexpected type prefix <" + typeData.prefix + ">");
-  check(typeData.postfix.empty(), line, "unexpected type postfix <" + typeData.postfix + ">");
+  check( beginsWith( nameData.name, "Vk" ), line, "name <" + nameData.name + "> does not begin with <Vk>" );
+  check( nameData.arraySizes.empty(), line, "name <" + nameData.name + "> with unsupported arraySizes" );
+  check( nameData.bitCount.empty(),
+         line,
+         "name <" + nameData.name + "> with unsupported bitCount <" + nameData.bitCount + ">" );
+  check( typeData.prefix == "typedef", line, "unexpected type prefix <" + typeData.prefix + ">" );
+  check( typeData.postfix.empty(), line, "unexpected 
type postfix <" + typeData.postfix + ">" ); - check(m_baseTypes.insert(std::make_pair(nameData.name, BaseTypeData(typeData.type, line))).second, line, "basetype <" + nameData.name + "> already specified"); - check(m_types.insert(std::make_pair(nameData.name, TypeCategory::BaseType)).second, line, "basetype <" + nameData.name + "> already specified as a type"); + check( m_baseTypes.insert( std::make_pair( nameData.name, BaseTypeData( typeData.type, line ) ) ).second, + line, + "basetype <" + nameData.name + "> already specified" ); + check( m_types.insert( std::make_pair( nameData.name, TypeCategory::BaseType ) ).second, + line, + "basetype <" + nameData.name + "> already specified as a type" ); } -void VulkanHppGenerator::readBitmask(tinyxml2::XMLElement const* element, std::map const& attributes) +void VulkanHppGenerator::readBitmask( tinyxml2::XMLElement const * element, + std::map const & attributes ) { int line = element->GetLineNum(); - auto aliasIt = attributes.find("alias"); - if (aliasIt != attributes.end()) + auto aliasIt = attributes.find( "alias" ); + if ( aliasIt != attributes.end() ) { - readBitmaskAlias(element, attributes); + readBitmaskAlias( element, attributes ); } else { - checkAttributes(line, attributes, { { "category",{ "bitmask" } } }, { { "requires",{} } }); + checkAttributes( line, attributes, { { "category", { "bitmask" } } }, { { "requires", {} } } ); std::string requirements; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "requires") + if ( attribute.first == "requires" ) { requirements = attribute.second; } @@ -3783,1731 +4611,1960 @@ void VulkanHppGenerator::readBitmask(tinyxml2::XMLElement const* element, std::m NameData nameData; TypeData typeData; - std::tie(nameData, typeData) = readNameAndType(element); + std::tie( nameData, typeData ) = readNameAndType( element ); - check(beginsWith(nameData.name, "Vk"), line, "name <" + nameData.name + "> does not begin with "); - check(nameData.arraySizes.empty(), line, "name <" + nameData.name + "> with unsupported arraySizes"); - check(nameData.bitCount.empty(), line, "name <" + nameData.name + "> with unsupported bitCount <" + nameData.bitCount + ">"); - warn((typeData.type == "VkFlags") || (typeData.type == "VkFlags64"), line, "unexpected bitmask type <" + typeData.type + ">"); - check(typeData.prefix == "typedef", line, "unexpected type prefix <" + typeData.prefix + ">"); - check(typeData.postfix.empty(), line, "unexpected type postfix <" + typeData.postfix + ">"); + check( beginsWith( nameData.name, "Vk" ), line, "name <" + nameData.name + "> does not begin with " ); + check( nameData.arraySizes.empty(), line, "name <" + nameData.name + "> with unsupported arraySizes" ); + check( nameData.bitCount.empty(), + line, + "name <" + nameData.name + "> with unsupported bitCount <" + nameData.bitCount + ">" ); + warn( ( typeData.type == "VkFlags" ) || ( typeData.type == "VkFlags64" ), + line, + "unexpected bitmask type <" + typeData.type + ">" ); + check( typeData.prefix == "typedef", line, "unexpected type prefix <" + typeData.prefix + ">" ); + check( typeData.postfix.empty(), line, "unexpected type postfix <" + typeData.postfix + ">" ); - check(m_commandToHandle.find(nameData.name) == m_commandToHandle.end(), line, "command <" + nameData.name + "> already specified"); + check( m_commandToHandle.find( nameData.name ) == m_commandToHandle.end(), + line, + "command <" + nameData.name + "> already specified" ); - 
m_bitmasks.insert(std::make_pair(nameData.name, BitmaskData(requirements, typeData.type, line))); - check(m_types.insert(std::make_pair(nameData.name, TypeCategory::Bitmask)).second, line, "bitmask <" + nameData.name + "> already specified as a type"); + m_bitmasks.insert( std::make_pair( nameData.name, BitmaskData( requirements, typeData.type, line ) ) ); + check( m_types.insert( std::make_pair( nameData.name, TypeCategory::Bitmask ) ).second, + line, + "bitmask <" + nameData.name + "> already specified as a type" ); } } -void VulkanHppGenerator::readBitmaskAlias(tinyxml2::XMLElement const* element, std::map const& attributes) +void VulkanHppGenerator::readBitmaskAlias( tinyxml2::XMLElement const * element, + std::map const & attributes ) { int line = element->GetLineNum(); - checkAttributes(line, attributes, { { "alias",{} },{ "category",{ "bitmask" } },{ "name",{} } }, {}); - checkElements(line, getChildElements(element), {}); + checkAttributes( line, attributes, { { "alias", {} }, { "category", { "bitmask" } }, { "name", {} } }, {} ); + checkElements( line, getChildElements( element ), {} ); std::string alias, name; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "alias") + if ( attribute.first == "alias" ) { alias = attribute.second; } - else if (attribute.first == "name") + else if ( attribute.first == "name" ) { name = attribute.second; } } - auto bitmasksIt = m_bitmasks.find(alias); - check(bitmasksIt != m_bitmasks.end(), line, "missing alias <" + alias + ">."); - check(bitmasksIt->second.alias.empty(), line, "alias for bitmask <" + bitmasksIt->first + "> already specified as <" + bitmasksIt->second.alias + ">"); + auto bitmasksIt = m_bitmasks.find( alias ); + check( bitmasksIt != m_bitmasks.end(), line, "missing alias <" + alias + ">." 
); + check( bitmasksIt->second.alias.empty(), + line, + "alias for bitmask <" + bitmasksIt->first + "> already specified as <" + bitmasksIt->second.alias + ">" ); bitmasksIt->second.alias = name; - check(m_types.insert(std::make_pair(name, TypeCategory::Bitmask)).second, line, "aliased bitmask <" + name + "> already specified as a type"); + check( m_types.insert( std::make_pair( name, TypeCategory::Bitmask ) ).second, + line, + "aliased bitmask <" + name + "> already specified as a type" ); } -void VulkanHppGenerator::readCommand(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readCommand( tinyxml2::XMLElement const * element ) { - std::map attributes = getAttributes(element); - auto aliasIt = attributes.find("alias"); - if (aliasIt != attributes.end()) + std::map attributes = getAttributes( element ); + auto aliasIt = attributes.find( "alias" ); + if ( aliasIt != attributes.end() ) { - readCommandAlias(element, attributes); + readCommandAlias( element, attributes ); } else { - readCommand(element, attributes); + readCommand( element, attributes ); } } -void VulkanHppGenerator::readCommand(tinyxml2::XMLElement const* element, std::map const& attributes) +void VulkanHppGenerator::readCommand( tinyxml2::XMLElement const * element, + std::map const & attributes ) { int line = element->GetLineNum(); - checkAttributes(line, attributes, {}, - { - { "cmdbufferlevel",{ "primary", "secondary" } }, - { "comment",{} }, - { "errorcodes",{} }, - { "pipeline",{ "compute", "graphics", "transfer" } }, - { "queues",{ "compute", "graphics", "sparse_binding", "transfer" } }, - { "renderpass",{ "both", "inside", "outside" } }, - { "successcodes",{} } - }); + checkAttributes( line, + attributes, + {}, + { { "cmdbufferlevel", { "primary", "secondary" } }, + { "comment", {} }, + { "errorcodes", {} }, + { "pipeline", { "compute", "graphics", "transfer" } }, + { "queues", { "compute", "graphics", "sparse_binding", "transfer" } }, + { "renderpass", { "both", "inside", "outside" } }, + { "successcodes", {} } } ); - std::vector children = getChildElements(element); - checkElements(line, children, { { "param", false }, { "proto", true } }, { "implicitexternsyncparams" }); + std::vector children = getChildElements( element ); + checkElements( line, children, { { "param", false }, { "proto", true } }, { "implicitexternsyncparams" } ); - CommandData commandData(line); - for (auto const& attribute : attributes) + CommandData commandData( line ); + for ( auto const & attribute : attributes ) { - if (attribute.first == "errorcodes") + if ( attribute.first == "errorcodes" ) { - commandData.errorCodes = tokenize(attribute.second, ","); + commandData.errorCodes = tokenize( attribute.second, "," ); // errorCodes are checked in checkCorrectness after complete reading } - else if (attribute.first == "successcodes") + else if ( attribute.first == "successcodes" ) { - commandData.successCodes = tokenize(attribute.second, ","); + commandData.successCodes = tokenize( attribute.second, "," ); // successCodes are checked in checkCorrectness after complete reading } } std::string name; - for (auto child : children) + for ( auto child : children ) { std::string value = child->Value(); - if (value == "param") + if ( value == "param" ) { - commandData.params.push_back(readCommandParam(child, commandData.params)); + commandData.params.push_back( readCommandParam( child, commandData.params ) ); } - else if (value == "proto") + else if ( value == "proto" ) { - std::tie(name, commandData.returnType) = readCommandProto(child); + 
std::tie( name, commandData.returnType ) = readCommandProto( child ); } } - assert(!name.empty()); + assert( !name.empty() ); - registerDeleter(name, std::make_pair(name, commandData)); - linkCommandToHandle(line, name, commandData); + registerDeleter( name, std::make_pair( name, commandData ) ); + linkCommandToHandle( line, name, commandData ); } -void VulkanHppGenerator::readCommandAlias(tinyxml2::XMLElement const* element, std::map const& attributes) +void VulkanHppGenerator::readCommandAlias( tinyxml2::XMLElement const * element, + std::map const & attributes ) { // for command aliases, create a copy of the aliased command int line = element->GetLineNum(); - checkAttributes(line, attributes, {}, { { "alias",{} }, { "name",{} }, }); - checkElements(line, getChildElements(element), {}); + checkAttributes( line, + attributes, + {}, + { + { "alias", {} }, + { "name", {} }, + } ); + checkElements( line, getChildElements( element ), {} ); std::string alias, name; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "alias") + if ( attribute.first == "alias" ) { alias = attribute.second; } - else if (attribute.first == "name") + else if ( attribute.first == "name" ) { name = attribute.second; - check(beginsWith(name, "vk"), line, "name <" + name + "> should begin with "); + check( beginsWith( name, "vk" ), line, "name <" + name + "> should begin with " ); } } - auto commandToHandleIt = m_commandToHandle.find(alias); - check(commandToHandleIt != m_commandToHandle.end(), line, "missing command <" + alias + ">"); - auto handleIt = m_handles.find(commandToHandleIt->second); - check(handleIt != m_handles.end(), line, "missing handle <" + commandToHandleIt->second + ">"); - auto commandsIt = handleIt->second.commands.find(alias); - if (commandsIt == handleIt->second.commands.end()) + auto commandToHandleIt = m_commandToHandle.find( alias ); + check( commandToHandleIt != m_commandToHandle.end(), line, "missing command <" + alias + ">" ); + auto handleIt = m_handles.find( commandToHandleIt->second ); + check( handleIt != m_handles.end(), line, "missing handle <" + commandToHandleIt->second + ">" ); + auto commandsIt = handleIt->second.commands.find( alias ); + if ( commandsIt == handleIt->second.commands.end() ) { // look, if this command is aliases and already aliased command - commandsIt = std::find_if(handleIt->second.commands.begin(), handleIt->second.commands.end(), [&alias](auto const& cd) { return cd.second.aliases.find(alias) != cd.second.aliases.end(); }); + commandsIt = + std::find_if( handleIt->second.commands.begin(), handleIt->second.commands.end(), [&alias]( auto const & cd ) { + return cd.second.aliases.find( alias ) != cd.second.aliases.end(); + } ); } - check(commandsIt != handleIt->second.commands.end(), line, "missing command <" + alias + "> in handle <" + handleIt->first + ">"); - check(commandsIt->second.aliases.insert(name).second, line, "alias <" + name + "> for command <" + alias + "> already specified"); + check( commandsIt != handleIt->second.commands.end(), + line, + "missing command <" + alias + "> in handle <" + handleIt->first + ">" ); + check( commandsIt->second.aliases.insert( name ).second, + line, + "alias <" + name + "> for command <" + alias + "> already specified" ); // and store the alias in the command-to-handle map - check(m_commandToHandle.find(name) == m_commandToHandle.end(), line, "command to handle mapping already holds the command <" + name + ">"); + check( m_commandToHandle.find( name ) == 
m_commandToHandle.end(), + line, + "command to handle mapping already holds the command <" + name + ">" ); m_commandToHandle[name] = handleIt->first; } -VulkanHppGenerator::ParamData VulkanHppGenerator::readCommandParam(tinyxml2::XMLElement const* element, std::vector const& params) +VulkanHppGenerator::ParamData VulkanHppGenerator::readCommandParam( tinyxml2::XMLElement const * element, + std::vector const & params ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, {}, { { "externsync",{} },{ "len",{} },{ "noautovalidity",{ "true" } },{ "optional",{ "false", "true" } } }); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( + line, + attributes, + {}, + { { "externsync", {} }, { "len", {} }, { "noautovalidity", { "true" } }, { "optional", { "false", "true" } } } ); - ParamData paramData(line); - for (auto attribute : attributes) + ParamData paramData( line ); + for ( auto attribute : attributes ) { - if (attribute.first == "len") + if ( attribute.first == "len" ) { paramData.len = attribute.second; - check(checkLenAttribute(paramData.len, params), line, "command param len <" + paramData.len + "> is not recognized as a valid len value"); + check( checkLenAttribute( paramData.len, params ), + line, + "command param len <" + paramData.len + "> is not recognized as a valid len value" ); } - else if (attribute.first == "optional") + else if ( attribute.first == "optional" ) { - paramData.optional = (attribute.second == "true"); + paramData.optional = ( attribute.second == "true" ); } } NameData nameData; - std::tie(nameData, paramData.type) = readNameAndType(element); + std::tie( nameData, paramData.type ) = readNameAndType( element ); - check(nameData.bitCount.empty(), line, "name <" + nameData.name + "> with unsupported bitCount <" + nameData.bitCount + ">"); - check(m_types.find(paramData.type.type) != m_types.end(), line, "unknown type <" + paramData.type.type + ">"); - check(paramData.type.prefix.empty() || (paramData.type.prefix == "const") || (paramData.type.prefix == "const struct") || (paramData.type.prefix == "struct"), line, "unexpected type prefix <" + paramData.type.prefix + ">"); - check(paramData.type.postfix.empty() || (paramData.type.postfix == "*") || (paramData.type.postfix == "**") || (paramData.type.postfix == "* const*"), line, "unexpected type postfix <" + paramData.type.postfix + ">"); - check(std::find_if(params.begin(), params.end(), [&name = nameData.name](ParamData const& pd) { return pd.name == name; }) == params.end(), line, "command param <" + nameData.name + "> already used"); - paramData.name = nameData.name; + check( nameData.bitCount.empty(), + line, + "name <" + nameData.name + "> with unsupported bitCount <" + nameData.bitCount + ">" ); + check( m_types.find( paramData.type.type ) != m_types.end(), line, "unknown type <" + paramData.type.type + ">" ); + check( paramData.type.prefix.empty() || ( paramData.type.prefix == "const" ) || + ( paramData.type.prefix == "const struct" ) || ( paramData.type.prefix == "struct" ), + line, + "unexpected type prefix <" + paramData.type.prefix + ">" ); + check( paramData.type.postfix.empty() || ( paramData.type.postfix == "*" ) || ( paramData.type.postfix == "**" ) || + ( paramData.type.postfix == "* const*" ), + line, + "unexpected type postfix <" + paramData.type.postfix + ">" ); + check( std::find_if( params.begin(), + params.end(), + [&name = nameData.name]( ParamData const & pd ) { return 
pd.name == name; } ) == params.end(), + line, + "command param <" + nameData.name + "> already used" ); + paramData.name = nameData.name; paramData.arraySizes = nameData.arraySizes; return paramData; } -std::pair VulkanHppGenerator::readCommandProto(tinyxml2::XMLElement const* element) +std::pair VulkanHppGenerator::readCommandProto( tinyxml2::XMLElement const * element ) { int line = element->GetLineNum(); - checkAttributes(line, getAttributes(element), {}, {}); + checkAttributes( line, getAttributes( element ), {}, {} ); NameData nameData; TypeData typeData; - std::tie(nameData, typeData) = readNameAndType(element); + std::tie( nameData, typeData ) = readNameAndType( element ); - check(beginsWith(nameData.name, "vk"), line, "name <" + nameData.name + "> does not begin with "); - check(nameData.arraySizes.empty(), line, "name <" + nameData.name + "> with unsupported arraySizes"); - check(nameData.bitCount.empty(), line, "name <" + nameData.name + "> with unsupported bitCount <" + nameData.bitCount + ">"); - check(m_types.find(typeData.type) != m_types.end(), line, "unknown type <" + typeData.type + ">"); - check(typeData.prefix.empty(), line, "unexpected type prefix <" + typeData.prefix + ">"); - check(typeData.postfix.empty(), line, "unexpected type postfix <" + typeData.postfix + ">"); - check(m_commandToHandle.find(nameData.name) == m_commandToHandle.end(), line, "command <" + nameData.name + "> already specified"); + check( beginsWith( nameData.name, "vk" ), line, "name <" + nameData.name + "> does not begin with " ); + check( nameData.arraySizes.empty(), line, "name <" + nameData.name + "> with unsupported arraySizes" ); + check( nameData.bitCount.empty(), + line, + "name <" + nameData.name + "> with unsupported bitCount <" + nameData.bitCount + ">" ); + check( m_types.find( typeData.type ) != m_types.end(), line, "unknown type <" + typeData.type + ">" ); + check( typeData.prefix.empty(), line, "unexpected type prefix <" + typeData.prefix + ">" ); + check( typeData.postfix.empty(), line, "unexpected type postfix <" + typeData.postfix + ">" ); + check( m_commandToHandle.find( nameData.name ) == m_commandToHandle.end(), + line, + "command <" + nameData.name + "> already specified" ); - return std::make_pair(nameData.name, typeData.type); + return std::make_pair( nameData.name, typeData.type ); } -void VulkanHppGenerator::readCommands(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readCommands( tinyxml2::XMLElement const * element ) { int line = element->GetLineNum(); - checkAttributes(line, getAttributes(element), {}, { { "comment",{} } }); + checkAttributes( line, getAttributes( element ), {}, { { "comment", {} } } ); - std::vector children = getChildElements(element); - checkElements(line, children, { { "command", false } }); - for (auto child : children) + std::vector children = getChildElements( element ); + checkElements( line, children, { { "command", false } } ); + for ( auto child : children ) { - readCommand(child); + readCommand( child ); } } -std::string VulkanHppGenerator::readComment(tinyxml2::XMLElement const* element) +std::string VulkanHppGenerator::readComment( tinyxml2::XMLElement const * element ) { int line = element->GetLineNum(); - checkAttributes(line, getAttributes(element), {}, {}); - checkElements(line, getChildElements(element), {}); + checkAttributes( line, getAttributes( element ), {}, {} ); + checkElements( line, getChildElements( element ), {} ); return element->GetText(); } -void VulkanHppGenerator::readDefine(tinyxml2::XMLElement const* 
element, std::map const& attributes) +void VulkanHppGenerator::readDefine( tinyxml2::XMLElement const * element, + std::map const & attributes ) { int line = element->GetLineNum(); - checkAttributes(line, attributes, { { "category",{ "define" } } }, { { "name",{} }, { "requires", {} } }); + checkAttributes( line, attributes, { { "category", { "define" } } }, { { "name", {} }, { "requires", {} } } ); std::string name; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "name") + if ( attribute.first == "name" ) { name = attribute.second; } - else if (attribute.first == "requires") + else if ( attribute.first == "requires" ) { - check(m_defines.find(attribute.second) != m_defines.end(), line, "using undefined requires <" + attribute.second + ">"); + check( m_defines.find( attribute.second ) != m_defines.end(), + line, + "using undefined requires <" + attribute.second + ">" ); } } - if (!name.empty()) + if ( !name.empty() ) { - check(!element->FirstChildElement(), line, "unknown formatting of type category=define name <" + name + ">"); - check(name == "VK_DEFINE_NON_DISPATCHABLE_HANDLE", line, "unknown type category=define name <" + name + ">"); - check(element->LastChild() && element->LastChild()->ToText() && element->LastChild()->ToText()->Value(), line, "unknown formatting of type category=define named <" + name + ">"); + check( !element->FirstChildElement(), line, "unknown formatting of type category=define name <" + name + ">" ); + check( name == "VK_DEFINE_NON_DISPATCHABLE_HANDLE", line, "unknown type category=define name <" + name + ">" ); + check( element->LastChild() && element->LastChild()->ToText() && element->LastChild()->ToText()->Value(), + line, + "unknown formatting of type category=define named <" + name + ">" ); // filter out the check for the different types of VK_DEFINE_NON_DISPATCHABLE_HANDLE - std::string text = element->LastChild()->ToText()->Value(); - size_t start = text.find("#if defined(__LP64__)"); - check(start != std::string::npos, line, "unexpected text in type category=define named <" + name + ">"); - size_t end = text.find_first_of("\r\n", start + 1); - check(end != std::string::npos, line, "unexpected text in type category=define named <" + name + ">"); - m_typesafeCheck = text.substr(start, end - start); + std::string text = element->LastChild()->ToText()->Value(); + size_t start = text.find( "#if defined(__LP64__)" ); + check( start != std::string::npos, line, "unexpected text in type category=define named <" + name + ">" ); + size_t end = text.find_first_of( "\r\n", start + 1 ); + check( end != std::string::npos, line, "unexpected text in type category=define named <" + name + ">" ); + m_typesafeCheck = text.substr( start, end - start ); } - else if (element->GetText()) + else if ( element->GetText() ) { std::string text = element->GetText(); - if ((text.find("class") != std::string::npos) || (text.find("struct") != std::string::npos)) + if ( ( text.find( "class" ) != std::string::npos ) || ( text.find( "struct" ) != std::string::npos ) ) { // here are a couple of structs as defines, which really are types! 
- tinyxml2::XMLElement const* child = element->FirstChildElement(); - check(child && (strcmp(child->Value(), "name") == 0) && child->GetText(), line, "unexpected formatting of type category=define"); + tinyxml2::XMLElement const * child = element->FirstChildElement(); + check( child && ( strcmp( child->Value(), "name" ) == 0 ) && child->GetText(), + line, + "unexpected formatting of type category=define" ); name = child->GetText(); - check(m_types.insert(std::make_pair(name, TypeCategory::Define)).second, line, "type <" + name + "> has already been speficied"); + check( m_types.insert( std::make_pair( name, TypeCategory::Define ) ).second, + line, + "type <" + name + "> has already been speficied" ); } else { - tinyxml2::XMLElement const* child = element->FirstChildElement(); - check(child && !child->FirstAttribute() && (strcmp(child->Value(), "name") == 0) && child->GetText(), line, "unknown formatting of type category define"); - name = trim(child->GetText()); - if (name == "VK_HEADER_VERSION") + tinyxml2::XMLElement const * child = element->FirstChildElement(); + check( child && !child->FirstAttribute() && ( strcmp( child->Value(), "name" ) == 0 ) && child->GetText(), + line, + "unknown formatting of type category define" ); + name = trim( child->GetText() ); + if ( name == "VK_HEADER_VERSION" ) { - m_version = trimEnd(element->LastChild()->ToText()->Value()); + m_version = trimEnd( element->LastChild()->ToText()->Value() ); } // ignore all the other defines - warn(!child->NextSiblingElement() || (child->NextSiblingElement() && !child->NextSiblingElement()->FirstAttribute() && (strcmp(child->NextSiblingElement()->Value(), "type") == 0) && !child->NextSiblingElement()->NextSiblingElement()), line, "unknown formatting of type category define"); + warn( !child->NextSiblingElement() || + ( child->NextSiblingElement() && !child->NextSiblingElement()->FirstAttribute() && + ( strcmp( child->NextSiblingElement()->Value(), "type" ) == 0 ) && + !child->NextSiblingElement()->NextSiblingElement() ), + line, + "unknown formatting of type category define" ); } } - assert(!name.empty()); - check(m_defines.insert(name).second, line, "define <" + name + "> has already been specified"); + assert( !name.empty() ); + check( m_defines.insert( name ).second, line, "define <" + name + "> has already been specified" ); } -void VulkanHppGenerator::readEnum(tinyxml2::XMLElement const* element, EnumData & enumData, bool bitmask, std::string const& prefix, std::string const& postfix) +void VulkanHppGenerator::readEnum( tinyxml2::XMLElement const * element, + EnumData & enumData, + bool bitmask, + std::string const & prefix, + std::string const & postfix ) { - std::map attributes = getAttributes(element); - auto aliasIt = attributes.find("alias"); - if (aliasIt != attributes.end()) + std::map attributes = getAttributes( element ); + auto aliasIt = attributes.find( "alias" ); + if ( aliasIt != attributes.end() ) { - readEnumAlias(element, attributes, enumData, bitmask, prefix, postfix); + readEnumAlias( element, attributes, enumData, bitmask, prefix, postfix ); } else { - readEnum(element, attributes, enumData, bitmask, prefix, postfix); + readEnum( element, attributes, enumData, bitmask, prefix, postfix ); } } -void VulkanHppGenerator::readEnum(tinyxml2::XMLElement const* element, std::map const& attributes, EnumData & enumData, bool bitmask, std::string const& prefix, std::string const& postfix) +void VulkanHppGenerator::readEnum( tinyxml2::XMLElement const * element, + std::map const & attributes, + EnumData & 
enumData, + bool bitmask, + std::string const & prefix, + std::string const & postfix ) { int line = element->GetLineNum(); - checkAttributes(line, attributes, { { "name",{} } }, { { "bitpos",{} },{ "comment",{} },{ "value",{} } }); - checkElements(line, getChildElements(element), {}); + checkAttributes( line, attributes, { { "name", {} } }, { { "bitpos", {} }, { "comment", {} }, { "value", {} } } ); + checkElements( line, getChildElements( element ), {} ); std::string alias, bitpos, name, value; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "bitpos") + if ( attribute.first == "bitpos" ) { bitpos = attribute.second; - check(!bitpos.empty(), line, "enum with empty bitpos"); + check( !bitpos.empty(), line, "enum with empty bitpos" ); } - else if (attribute.first == "name") + else if ( attribute.first == "name" ) { name = attribute.second; - check(!name.empty(), line, "enum with empty name"); + check( !name.empty(), line, "enum with empty name" ); } - else if (attribute.first == "value") + else if ( attribute.first == "value" ) { value = attribute.second; - check(!value.empty(), line, "enum with empty value"); + check( !value.empty(), line, "enum with empty value" ); } } - assert(!name.empty()); + assert( !name.empty() ); - std::string tag = findTag(m_tags, name, postfix); + std::string tag = findTag( m_tags, name, postfix ); - check(bitpos.empty() ^ value.empty(), line, "invalid set of attributes for enum <" + name + ">"); - enumData.addEnumValue(line, name, bitmask, !bitpos.empty(), prefix, postfix, tag); + check( bitpos.empty() ^ value.empty(), line, "invalid set of attributes for enum <" + name + ">" ); + enumData.addEnumValue( line, name, bitmask, !bitpos.empty(), prefix, postfix, tag ); } -void VulkanHppGenerator::readEnumAlias(tinyxml2::XMLElement const* element, std::map const& attributes, EnumData & enumData, bool bitmask, std::string const& prefix, std::string const& postfix) +void VulkanHppGenerator::readEnumAlias( tinyxml2::XMLElement const * element, + std::map const & attributes, + EnumData & enumData, + bool bitmask, + std::string const & prefix, + std::string const & postfix ) { int line = element->GetLineNum(); - checkAttributes(line, attributes, { { "alias",{} }, { "name",{} } }, { { "comment", {} } }); - checkElements(line, getChildElements(element), {}); + checkAttributes( line, attributes, { { "alias", {} }, { "name", {} } }, { { "comment", {} } } ); + checkElements( line, getChildElements( element ), {} ); std::string alias, bitpos, name, value; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "alias") + if ( attribute.first == "alias" ) { alias = attribute.second; - check(!alias.empty(), line, "enum with empty alias"); + check( !alias.empty(), line, "enum with empty alias" ); } - else if (attribute.first == "name") + else if ( attribute.first == "name" ) { name = attribute.second; - check(!name.empty(), line, "enum with empty name"); + check( !name.empty(), line, "enum with empty name" ); } } - assert(!name.empty()); + assert( !name.empty() ); - std::string tag = findTag(m_tags, name, postfix); + std::string tag = findTag( m_tags, name, postfix ); - check(std::find_if(enumData.values.begin(), enumData.values.end(), [&alias](EnumValueData const& evd) { return evd.vulkanValue == alias; }) != enumData.values.end(), - line, "enum alias <" + alias + "> not listed in set of enum values"); - enumData.aliases.push_back(std::make_pair(name, 
createEnumValueName(name, prefix, postfix, bitmask, tag))); + check( std::find_if( enumData.values.begin(), + enumData.values.end(), + [&alias]( EnumValueData const & evd ) { return evd.vulkanValue == alias; } ) != + enumData.values.end(), + line, + "enum alias <" + alias + "> not listed in set of enum values" ); + enumData.aliases.push_back( std::make_pair( name, createEnumValueName( name, prefix, postfix, bitmask, tag ) ) ); } -void VulkanHppGenerator::readEnumConstant(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readEnumConstant( tinyxml2::XMLElement const * element ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, { { "name",{} } }, { { "alias",{} },{ "comment",{} }, { "value", {} } }); - checkElements(line, getChildElements(element), {}); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, attributes, { { "name", {} } }, { { "alias", {} }, { "comment", {} }, { "value", {} } } ); + checkElements( line, getChildElements( element ), {} ); - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "alias") + if ( attribute.first == "alias" ) { - check(m_constants.find(attribute.second) != m_constants.end(), line, "unknown enum constant alias <" + attribute.second + ">"); + check( m_constants.find( attribute.second ) != m_constants.end(), + line, + "unknown enum constant alias <" + attribute.second + ">" ); } - else if (attribute.first == "name") + else if ( attribute.first == "name" ) { - check(m_constants.insert(attribute.second).second, line, "already specified enum constant <" + attribute.second + ">"); + check( m_constants.insert( attribute.second ).second, + line, + "already specified enum constant <" + attribute.second + ">" ); } } } -void VulkanHppGenerator::readEnums(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readEnums( tinyxml2::XMLElement const * element ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, { { "name",{} } }, { { "comment",{} },{ "type",{ "bitmask", "enum" } } }); - std::vector children = getChildElements(element); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, attributes, { { "name", {} } }, { { "comment", {} }, { "type", { "bitmask", "enum" } } } ); + std::vector children = getChildElements( element ); std::string name, type; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "name") + if ( attribute.first == "name" ) { name = attribute.second; - check(!name.empty(), line, "enum with empty name"); + check( !name.empty(), line, "enum with empty name" ); } - else if (attribute.first == "type") + else if ( attribute.first == "type" ) { type = attribute.second; - check(!type.empty(), line, "enum with empty type"); + check( !type.empty(), line, "enum with empty type" ); } } - assert(!name.empty()); + assert( !name.empty() ); - if (name == "API Constants") + if ( name == "API Constants" ) { - checkElements(line, children, { { "enum", false } }, {}); - for (auto const& child : children) + checkElements( line, children, { { "enum", false } }, {} ); + for ( auto const & child : children ) { - readEnumConstant(child); + readEnumConstant( child ); } } else { - checkElements(line, children, {}, { "comment", "enum", "unused" }); - check(!type.empty(), 
line, "enum without type"); + checkElements( line, children, {}, { "comment", "enum", "unused" } ); + check( !type.empty(), line, "enum without type" ); // get the EnumData entry in enum map - std::map::iterator it = m_enums.find(name); - if (it == m_enums.end()) + std::map::iterator it = m_enums.find( name ); + if ( it == m_enums.end() ) { // well, some enums are not listed in the section - warn(false, line, "enum <" + name + "> is not listed as enum in the types section"); - it = m_enums.insert(std::make_pair(name, EnumData())).first; + warn( false, line, "enum <" + name + "> is not listed as enum in the types section" ); + it = m_enums.insert( std::make_pair( name, EnumData() ) ).first; } - check(it->second.values.empty(), line, "enum <" + name + "> already holds values"); + check( it->second.values.empty(), line, "enum <" + name + "> already holds values" ); // mark it as a bitmask, if it is one - bool bitmask = (type == "bitmask"); + bool bitmask = ( type == "bitmask" ); it->second.isBitmask = bitmask; - if (bitmask) + if ( bitmask ) { // look for the corresponding bitmask and set the requirements if needed! - auto bitmaskIt = std::find_if(m_bitmasks.begin(), m_bitmasks.end(), [&name](auto const& bitmask) { return bitmask.second.requirements == name; }); - if (bitmaskIt == m_bitmasks.end()) + auto bitmaskIt = std::find_if( m_bitmasks.begin(), m_bitmasks.end(), [&name]( auto const & bitmask ) { + return bitmask.second.requirements == name; + } ); + if ( bitmaskIt == m_bitmasks.end() ) { - warn(false, line, "enum <" + name + "> is not listed as an requires for any bitmask in the types section"); + warn( false, line, "enum <" + name + "> is not listed as an requires for any bitmask in the types section" ); std::string bitmaskName = name; - size_t pos = bitmaskName.rfind("FlagBits"); - check(pos != std::string::npos, line, "enum <" + name + "> does not contain as substring"); - bitmaskName.replace(pos, 8, "Flags"); + size_t pos = bitmaskName.rfind( "FlagBits" ); + check( pos != std::string::npos, line, "enum <" + name + "> does not contain as substring" ); + bitmaskName.replace( pos, 8, "Flags" ); - bitmaskIt = m_bitmasks.find(bitmaskName); - check(bitmaskIt != m_bitmasks.end(), line, "enum <" + name + "> has not corresponding bitmask <" + bitmaskName + "> listed in the types section"); - assert(bitmaskIt->second.requirements.empty()); + bitmaskIt = m_bitmasks.find( bitmaskName ); + check( bitmaskIt != m_bitmasks.end(), + line, + "enum <" + name + "> has not corresponding bitmask <" + bitmaskName + "> listed in the types section" ); + assert( bitmaskIt->second.requirements.empty() ); bitmaskIt->second.requirements = name; } } - std::string prefix = getEnumPrefix(line, name, bitmask); - std::string postfix = getEnumPostfix(name, m_tags, prefix); + std::string prefix = getEnumPrefix( line, name, bitmask ); + std::string postfix = getEnumPostfix( name, m_tags, prefix ); // read the names of the enum values - for (auto child : children) + for ( auto child : children ) { std::string value = child->Value(); - if (value == "comment") + if ( value == "comment" ) { - readComment(child); + readComment( child ); } - else if (value == "enum") + else if ( value == "enum" ) { - readEnum(child, it->second, bitmask, prefix, postfix); + readEnum( child, it->second, bitmask, prefix, postfix ); } } } } -void VulkanHppGenerator::readExtension(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readExtension( tinyxml2::XMLElement const * element ) { - int line = element->GetLineNum(); - std::map 
attributes = getAttributes(element); - checkAttributes(line, attributes, - { - { "name",{} }, - { "number",{} }, - { "supported",{ "disabled", "enabled", "vulkan" } } - }, - { - { "author",{} }, - { "comment",{} }, - { "contact",{} }, - { "deprecatedby",{} }, - { "obsoletedby",{} }, - { "platform",{} }, - { "promotedto",{} }, - { "provisional", { "true" } }, - { "requires",{} }, - { "requiresCore",{} }, - { "sortorder", { "1" } }, - { "specialuse", { "cadsupport", "d3demulation", "debugging", "devtools", "glemulation"} }, - { "type",{ "device", "instance" } } - }); - std::vector children = getChildElements(element); - checkElements(line, children, {}, { "require" }); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, + attributes, + { { "name", {} }, { "number", {} }, { "supported", { "disabled", "enabled", "vulkan" } } }, + { { "author", {} }, + { "comment", {} }, + { "contact", {} }, + { "deprecatedby", {} }, + { "obsoletedby", {} }, + { "platform", {} }, + { "promotedto", {} }, + { "provisional", { "true" } }, + { "requires", {} }, + { "requiresCore", {} }, + { "sortorder", { "1" } }, + { "specialuse", { "cadsupport", "d3demulation", "debugging", "devtools", "glemulation" } }, + { "type", { "device", "instance" } } } ); + std::vector children = getChildElements( element ); + checkElements( line, children, {}, { "require" } ); - std::string deprecatedBy, name, obsoletedBy, platform, promotedTo, supported; + std::string deprecatedBy, name, obsoletedBy, platform, promotedTo, supported; std::vector requirements; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "deprecatedby") + if ( attribute.first == "deprecatedby" ) { deprecatedBy = attribute.second; } - else if (attribute.first == "name") + else if ( attribute.first == "name" ) { name = attribute.second; } - else if (attribute.first == "obsoletedby") + else if ( attribute.first == "obsoletedby" ) { obsoletedBy = attribute.second; } - else if (attribute.first == "platform") + else if ( attribute.first == "platform" ) { platform = attribute.second; - check(m_platforms.find(platform) != m_platforms.end(), line, "unknown platform <" + platform + ">"); + check( m_platforms.find( platform ) != m_platforms.end(), line, "unknown platform <" + platform + ">" ); } - else if (attribute.first == "promotedto") + else if ( attribute.first == "promotedto" ) { promotedTo = attribute.second; } - else if (attribute.first == "requires") + else if ( attribute.first == "requires" ) { - requirements = tokenize(attribute.second, ","); + requirements = tokenize( attribute.second, "," ); } - else if (attribute.first == "requiresCore") + else if ( attribute.first == "requiresCore" ) { - std::string const& requiresCore = attribute.second; - check(std::find_if(m_features.begin(), m_features.end(), [&requiresCore](std::pair const& nameNumber){ return nameNumber.second == requiresCore; }) != m_features.end(), - line, "unknown feature number <" + attribute.second + ">"); + std::string const & requiresCore = attribute.second; + check( std::find_if( m_features.begin(), + m_features.end(), + [&requiresCore]( std::pair const & nameNumber ) { + return nameNumber.second == requiresCore; + } ) != m_features.end(), + line, + "unknown feature number <" + attribute.second + ">" ); } - else if (attribute.first == "supported") + else if ( attribute.first == "supported" ) { supported = attribute.second; } } - if (supported == "disabled") + if ( supported 
== "disabled" ) { // kick out all the disabled stuff we've read before !! - for (auto const& child : children) + for ( auto const & child : children ) { - readExtensionDisabledRequire(name, child); + readExtensionDisabledRequire( name, child ); } } else { - auto pitb = m_extensions.insert(std::make_pair(name, ExtensionData(line))); - check(pitb.second, line, "already encountered extension <" + name + ">"); + auto pitb = m_extensions.insert( std::make_pair( name, ExtensionData( line ) ) ); + check( pitb.second, line, "already encountered extension <" + name + ">" ); pitb.first->second.deprecatedBy = deprecatedBy; - pitb.first->second.obsoletedBy = obsoletedBy; - pitb.first->second.promotedTo = promotedTo; - for (auto const& r : requirements) + pitb.first->second.obsoletedBy = obsoletedBy; + pitb.first->second.promotedTo = promotedTo; + for ( auto const & r : requirements ) { - check(pitb.first->second.requirements.insert(std::make_pair(r, line)).second, line, "required extension <" + r + "> already listed"); + check( pitb.first->second.requirements.insert( std::make_pair( r, line ) ).second, + line, + "required extension <" + r + "> already listed" ); } - std::string tag = extractTag(line, name, m_tags); - for (auto child : children) + std::string tag = extractTag( line, name, m_tags ); + for ( auto child : children ) { - readExtensionRequire(child, platform, tag, pitb.first->second.requirements); + readExtensionRequire( child, platform, tag, pitb.first->second.requirements ); } } } -void VulkanHppGenerator::readExtensionDisabledCommand(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readExtensionDisabledCommand( tinyxml2::XMLElement const * element ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, { { "name",{} } }, {}); - checkElements(line, getChildElements(element), {}); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, attributes, { { "name", {} } }, {} ); + checkElements( line, getChildElements( element ), {} ); - std::string name = attributes.find("name")->second; + std::string name = attributes.find( "name" )->second; // first unlink the command from its class - auto commandToHandleIt = m_commandToHandle.find(name); - check(commandToHandleIt != m_commandToHandle.end(), line, "try to remove unknown command <" + name + ">"); - auto handlesIt = m_handles.find(m_commandToHandle.find(name)->second); - check(handlesIt != m_handles.end(), line, "cannot find handle corresponding to command <" + name + ">"); - auto commandIt = handlesIt->second.commands.find(name); - check(commandIt != handlesIt->second.commands.end(), line, "cannot find command <" + name + "> in commands associated with handle <" + handlesIt->first + ">"); - if (!commandIt->second.aliases.empty()) + auto commandToHandleIt = m_commandToHandle.find( name ); + check( commandToHandleIt != m_commandToHandle.end(), line, "try to remove unknown command <" + name + ">" ); + auto handlesIt = m_handles.find( m_commandToHandle.find( name )->second ); + check( handlesIt != m_handles.end(), line, "cannot find handle corresponding to command <" + name + ">" ); + auto commandIt = handlesIt->second.commands.find( name ); + check( commandIt != handlesIt->second.commands.end(), + line, + "cannot find command <" + name + "> in commands associated with handle <" + handlesIt->first + ">" ); + if ( !commandIt->second.aliases.empty() ) { - // if there's an alias of the to-be-removed command, insert 
that as a new command with the very same CommandData (minus its alias) - check(commandIt->second.aliases.size() == 1, line, "try to disable command <" + name + "> with more than one alias -> don't know what to do"); + // if there's an alias of the to-be-removed command, insert that as a new command with the very same CommandData + // (minus its alias) + check( commandIt->second.aliases.size() == 1, + line, + "try to disable command <" + name + "> with more than one alias -> don't know what to do" ); std::string aliasName = *commandIt->second.aliases.begin(); commandIt->second.aliases.clear(); - handlesIt->second.commands.insert(std::make_pair(aliasName, commandIt->second)); + handlesIt->second.commands.insert( std::make_pair( aliasName, commandIt->second ) ); } - handlesIt->second.commands.erase(commandIt); + handlesIt->second.commands.erase( commandIt ); // then remove the command from the command-to-handle map - m_commandToHandle.erase(commandToHandleIt); + m_commandToHandle.erase( commandToHandleIt ); } -void VulkanHppGenerator::readExtensionDisabledEnum(std::string const& extensionName, tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readExtensionDisabledEnum( std::string const & extensionName, + tinyxml2::XMLElement const * element ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, { { "name",{} } }, { { "alias", {} }, { "bitpos",{} },{ "extends",{} },{ "offset",{} },{ "value",{} } }); - checkElements(line, getChildElements(element), {}); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, + attributes, + { { "name", {} } }, + { { "alias", {} }, { "bitpos", {} }, { "extends", {} }, { "offset", {} }, { "value", {} } } ); + checkElements( line, getChildElements( element ), {} ); std::string extends, name; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "extends") + if ( attribute.first == "extends" ) { extends = attribute.second; } - else if (attribute.first == "name") + else if ( attribute.first == "name" ) { name = attribute.second; } } - if (!extends.empty()) + if ( !extends.empty() ) { - auto enumIt = m_enums.find(extends); - check(enumIt != m_enums.end(), line, "disabled extension <" + extensionName + "> references unknown enum <" + extends + ">"); - check(std::find_if(enumIt->second.values.begin(), enumIt->second.values.end(), [&name](EnumValueData const& evd) { return evd.vulkanValue == name; }) == enumIt->second.values.end(), line, "disabled extension <" + extensionName + "> references known enum value <" + name + ">"); + auto enumIt = m_enums.find( extends ); + check( enumIt != m_enums.end(), + line, + "disabled extension <" + extensionName + "> references unknown enum <" + extends + ">" ); + check( std::find_if( enumIt->second.values.begin(), + enumIt->second.values.end(), + [&name]( EnumValueData const & evd ) { return evd.vulkanValue == name; } ) == + enumIt->second.values.end(), + line, + "disabled extension <" + extensionName + "> references known enum value <" + name + ">" ); } } -void VulkanHppGenerator::readExtensionDisabledRequire(std::string const& extensionName, tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readExtensionDisabledRequire( std::string const & extensionName, + tinyxml2::XMLElement const * element ) { int line = element->GetLineNum(); - checkAttributes(line, getAttributes(element), {}, {}); - std::vector children = 
getChildElements(element); - checkElements(line, children, { { "enum", false } }, { "command", "comment", "type" }); + checkAttributes( line, getAttributes( element ), {}, {} ); + std::vector children = getChildElements( element ); + checkElements( line, children, { { "enum", false } }, { "command", "comment", "type" } ); - for (auto child : children) + for ( auto child : children ) { std::string value = child->Value(); - if (value == "command") + if ( value == "command" ) { - readExtensionDisabledCommand(child); + readExtensionDisabledCommand( child ); } - else if (value == "comment") + else if ( value == "comment" ) { - readComment(child); + readComment( child ); } - else if (value == "enum") + else if ( value == "enum" ) { - readExtensionDisabledEnum(extensionName, child); + readExtensionDisabledEnum( extensionName, child ); } else { - assert(value == "type"); - readExtensionDisabledType(child); + assert( value == "type" ); + readExtensionDisabledType( child ); } } } -void VulkanHppGenerator::readExtensionDisabledType(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readExtensionDisabledType( tinyxml2::XMLElement const * element ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, { { "name",{} } }, {}); - checkElements(line, getChildElements(element), {}); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, attributes, { { "name", {} } }, {} ); + checkElements( line, getChildElements( element ), {} ); - std::string name = attributes.find("name")->second; + std::string name = attributes.find( "name" )->second; - auto typeIt = m_types.find(name); - check(typeIt != m_types.end(), line, "trying to remove unknown type <" + name + ">"); + auto typeIt = m_types.find( name ); + check( typeIt != m_types.end(), line, "trying to remove unknown type <" + name + ">" ); - switch (typeIt->second) + switch ( typeIt->second ) { - case TypeCategory::Bitmask: + case TypeCategory::Bitmask: { - auto bitmasksIt = m_bitmasks.find(name); - check(bitmasksIt != m_bitmasks.end(), line, "trying to remove unknown bitmask <" + name + ">"); - check(bitmasksIt->second.alias.empty(), line, "trying to remove disabled bitmask <" + name + "> which has alias <" + bitmasksIt->second.alias + ">"); - m_bitmasks.erase(bitmasksIt); + auto bitmasksIt = m_bitmasks.find( name ); + check( bitmasksIt != m_bitmasks.end(), line, "trying to remove unknown bitmask <" + name + ">" ); + check( bitmasksIt->second.alias.empty(), + line, + "trying to remove disabled bitmask <" + name + "> which has alias <" + bitmasksIt->second.alias + ">" ); + m_bitmasks.erase( bitmasksIt ); } break; - case TypeCategory::Enum: + case TypeCategory::Enum: { - auto enumIt = m_enums.find(name); - check(enumIt != m_enums.end(), line, "trying to remove unknown enum <" + name + ">"); - check(enumIt->second.alias.empty(), line, "trying to remove disabled enum <" + name + "> which has alias <" + enumIt->second.alias + ">"); - m_enums.erase(enumIt); + auto enumIt = m_enums.find( name ); + check( enumIt != m_enums.end(), line, "trying to remove unknown enum <" + name + ">" ); + check( enumIt->second.alias.empty(), + line, + "trying to remove disabled enum <" + name + "> which has alias <" + enumIt->second.alias + ">" ); + m_enums.erase( enumIt ); } break; - case TypeCategory::Struct: + case TypeCategory::Struct: { - auto structIt = m_structures.find(name); - check(structIt != m_structures.end(), line, "trying to remove 
unknown struct <" + name + ">"); - check(structIt->second.aliases.empty(), line, "trying to remove disabled structure <" + name + "> which has " + std::to_string(structIt->second.aliases.size()) + "aliases"); - m_structures.erase(structIt); + auto structIt = m_structures.find( name ); + check( structIt != m_structures.end(), line, "trying to remove unknown struct <" + name + ">" ); + check( structIt->second.aliases.empty(), + line, + "trying to remove disabled structure <" + name + "> which has " + + std::to_string( structIt->second.aliases.size() ) + "aliases" ); + m_structures.erase( structIt ); } break; - default: - check(false, line, "trying to remove <" + name + "> of unhandled type <" + toString(typeIt->second) + ">"); - break; + default: + check( false, line, "trying to remove <" + name + "> of unhandled type <" + toString( typeIt->second ) + ">" ); + break; } } -void VulkanHppGenerator::readExtensionRequire(tinyxml2::XMLElement const* element, std::string const& platform, std::string const& tag, std::map & requirements) +void VulkanHppGenerator::readExtensionRequire( tinyxml2::XMLElement const * element, + std::string const & platform, + std::string const & tag, + std::map & requirements ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, {}, { { "extension",{} },{ "feature",{} } }); - std::vector children = getChildElements(element); - checkElements(line, children, {}, { "command", "comment", "enum", "type" }); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, attributes, {}, { { "extension", {} }, { "feature", {} } } ); + std::vector children = getChildElements( element ); + checkElements( line, children, {}, { "command", "comment", "enum", "type" } ); - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "extension") + if ( attribute.first == "extension" ) { - check(requirements.insert(std::make_pair(attribute.second, line)).second, line, "required extension <" + attribute.second + "> already listed"); + check( requirements.insert( std::make_pair( attribute.second, line ) ).second, + line, + "required extension <" + attribute.second + "> already listed" ); } else { - assert(attribute.first == "feature"); - check(m_features.find(attribute.second) != m_features.end(), line, "unknown feature <" + attribute.second + ">"); + assert( attribute.first == "feature" ); + check( + m_features.find( attribute.second ) != m_features.end(), line, "unknown feature <" + attribute.second + ">" ); } } - for (auto child : children) + for ( auto child : children ) { std::string value = child->Value(); - if (value == "command") + if ( value == "command" ) { - readExtensionRequireCommand(child, platform); + readExtensionRequireCommand( child, platform ); } - else if (value == "comment") + else if ( value == "comment" ) { - readComment(child); + readComment( child ); } - else if (value == "enum") + else if ( value == "enum" ) { - readRequireEnum(child, tag); + readRequireEnum( child, tag ); } - else if (value == "type") + else if ( value == "type" ) { - readExtensionRequireType(child, platform); + readExtensionRequireType( child, platform ); } } } -void VulkanHppGenerator::readExtensionRequireCommand(tinyxml2::XMLElement const* element, std::string const& platform) +void VulkanHppGenerator::readExtensionRequireCommand( tinyxml2::XMLElement const * element, + std::string const & platform ) { - int line = 
element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, { { "name",{} } }, {}); - checkElements(line, getChildElements(element), {}); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, attributes, { { "name", {} } }, {} ); + checkElements( line, getChildElements( element ), {} ); // just add the protect string to the CommandData - if (!platform.empty()) + if ( !platform.empty() ) { - std::string name = attributes.find("name")->second; + std::string name = attributes.find( "name" )->second; - check(m_platforms.find(platform) != m_platforms.end(), line, "unknown platform <" + platform + ">"); - auto commandToHandleIt = m_commandToHandle.find(name); - check(commandToHandleIt != m_commandToHandle.end(), line, "unknown command <" + name + ">"); - auto const& handlesIt = m_handles.find(commandToHandleIt->second); - check(handlesIt != m_handles.end(), line, "unknown handle for command <" + name + ">"); - auto const& commandsIt = handlesIt->second.commands.find(name); - check(commandsIt != handlesIt->second.commands.end(), line, "unknown command <" + name + "> for handle <" + handlesIt->first + ">"); + check( m_platforms.find( platform ) != m_platforms.end(), line, "unknown platform <" + platform + ">" ); + auto commandToHandleIt = m_commandToHandle.find( name ); + check( commandToHandleIt != m_commandToHandle.end(), line, "unknown command <" + name + ">" ); + auto const & handlesIt = m_handles.find( commandToHandleIt->second ); + check( handlesIt != m_handles.end(), line, "unknown handle for command <" + name + ">" ); + auto const & commandsIt = handlesIt->second.commands.find( name ); + check( commandsIt != handlesIt->second.commands.end(), + line, + "unknown command <" + name + "> for handle <" + handlesIt->first + ">" ); commandsIt->second.platform = platform; } } -void VulkanHppGenerator::readExtensionRequireType(tinyxml2::XMLElement const* element, std::string const& platform) +void VulkanHppGenerator::readExtensionRequireType( tinyxml2::XMLElement const * element, std::string const & platform ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, { { "name",{} } }, {}); - checkElements(line, getChildElements(element), {}); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, attributes, { { "name", {} } }, {} ); + checkElements( line, getChildElements( element ), {} ); // add the protect-string to the appropriate type: enum, flag, handle, scalar, or struct - std::string name = attributes.find("name")->second; + std::string name = attributes.find( "name" )->second; - auto typeIt = m_types.find(name); - check(typeIt != m_types.end(), line, "failed to find required type <" + name + ">"); + auto typeIt = m_types.find( name ); + check( typeIt != m_types.end(), line, "failed to find required type <" + name + ">" ); - if (typeIt->second == TypeCategory::Handle) + if ( typeIt->second == TypeCategory::Handle ) { - assert(beginsWith(name, "Vk")); - auto objectTypeIt = m_enums.find("VkObjectType"); - assert(objectTypeIt != m_enums.end()); - std::string objectTypeName = "e" + stripPrefix(name, "Vk"); + assert( beginsWith( name, "Vk" ) ); + auto objectTypeIt = m_enums.find( "VkObjectType" ); + assert( objectTypeIt != m_enums.end() ); + std::string objectTypeName = "e" + stripPrefix( name, "Vk" ); - auto handleIt = m_handles.find(name); - if (handleIt != m_handles.end()) 
+ auto handleIt = m_handles.find( name ); + if ( handleIt != m_handles.end() ) { - auto valueIt = std::find_if(objectTypeIt->second.values.begin(), objectTypeIt->second.values.end(), [objectTypeName](EnumValueData const& evd) {return evd.vkValue == objectTypeName; }); - check(valueIt != objectTypeIt->second.values.end(), line, "missing entry in VkObjectType enum for handle <" + name + ">."); + auto valueIt = + std::find_if( objectTypeIt->second.values.begin(), + objectTypeIt->second.values.end(), + [objectTypeName]( EnumValueData const & evd ) { return evd.vkValue == objectTypeName; } ); + check( valueIt != objectTypeIt->second.values.end(), + line, + "missing entry in VkObjectType enum for handle <" + name + ">." ); } else { - handleIt = std::find_if(m_handles.begin(), m_handles.end(), [name](auto const& h) { return h.second.alias == name; }); - auto aliasIt = std::find_if(objectTypeIt->second.aliases.begin(), objectTypeIt->second.aliases.end(), [objectTypeName](auto const& alias) { return alias.second == objectTypeName; }); - check(aliasIt != objectTypeIt->second.aliases.end(), line, "missing alias entry in VkObjectType enum for alias handle <" + name + ">."); + handleIt = + std::find_if( m_handles.begin(), m_handles.end(), [name]( auto const & h ) { return h.second.alias == name; } ); + auto aliasIt = std::find_if( objectTypeIt->second.aliases.begin(), + objectTypeIt->second.aliases.end(), + [objectTypeName]( auto const & alias ) { return alias.second == objectTypeName; } ); + check( aliasIt != objectTypeIt->second.aliases.end(), + line, + "missing alias entry in VkObjectType enum for alias handle <" + name + ">." ); } } - if (!platform.empty()) + if ( !platform.empty() ) { - switch (typeIt->second) + switch ( typeIt->second ) { - case TypeCategory::Bitmask: + case TypeCategory::Bitmask: { - auto bitmaskIt = m_bitmasks.find(name); - check(bitmaskIt != m_bitmasks.end(), line, "failed to find required bitmask <" + name + ">"); - check(bitmaskIt->second.platform.empty(), line, "platform already specified for bitmask <" + name + ">"); + auto bitmaskIt = m_bitmasks.find( name ); + check( bitmaskIt != m_bitmasks.end(), line, "failed to find required bitmask <" + name + ">" ); + check( bitmaskIt->second.platform.empty(), line, "platform already specified for bitmask <" + name + ">" ); bitmaskIt->second.platform = platform; - assert((m_enums.find(bitmaskIt->second.requirements) == m_enums.end()) || (m_enums.find(bitmaskIt->second.requirements)->second.isBitmask)); + assert( ( m_enums.find( bitmaskIt->second.requirements ) == m_enums.end() ) || + ( m_enums.find( bitmaskIt->second.requirements )->second.isBitmask ) ); } break; - case TypeCategory::Define: - // no need to protect a "defined" type - break; - case TypeCategory::Enum: + case TypeCategory::Define: + // no need to protect a "defined" type + break; + case TypeCategory::Enum: { - auto enumIt = m_enums.find(name); - check(enumIt != m_enums.end(), line, "failed to find required enum <" + name + ">"); - check(enumIt->second.platform.empty(), line, "platform already specified for enum <" + name + ">"); + auto enumIt = m_enums.find( name ); + check( enumIt != m_enums.end(), line, "failed to find required enum <" + name + ">" ); + check( enumIt->second.platform.empty(), line, "platform already specified for enum <" + name + ">" ); enumIt->second.platform = platform; } break; - case TypeCategory::Handle: + case TypeCategory::Handle: { - auto handleIt = m_handles.find(name); - check(handleIt != m_handles.end(), line, "failed to find required 
handle <" + name + ">"); - check(handleIt->second.platform.empty(), line, "platform already specified for handle <" + name + ">"); + auto handleIt = m_handles.find( name ); + check( handleIt != m_handles.end(), line, "failed to find required handle <" + name + ">" ); + check( handleIt->second.platform.empty(), line, "platform already specified for handle <" + name + ">" ); handleIt->second.platform = platform; } break; - case TypeCategory::Struct: - case TypeCategory::Union: // unions are listed together with the structures! + case TypeCategory::Struct: + case TypeCategory::Union: // unions are listed together with the structures! { - auto structIt = m_structures.find(name); - check(structIt != m_structures.end(), line, "failed to find required struct <" + name + ">"); - check(structIt->second.platform.empty(), line, "platform already specified for structure <" + name + ">"); + auto structIt = m_structures.find( name ); + check( structIt != m_structures.end(), line, "failed to find required struct <" + name + ">" ); + check( structIt->second.platform.empty(), line, "platform already specified for structure <" + name + ">" ); structIt->second.platform = platform; } break; - default: - check(false, line, "trying to protect <" + name + "> of unhandled type <" + toString(typeIt->second) + ">"); - break; + default: + check( false, line, "trying to protect <" + name + "> of unhandled type <" + toString( typeIt->second ) + ">" ); + break; } } } -void VulkanHppGenerator::readExtensions(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readExtensions( tinyxml2::XMLElement const * element ) { int line = element->GetLineNum(); - checkAttributes(line, getAttributes(element), { { "comment",{} } }, {}); - std::vector children = getChildElements(element); - checkElements(line, children, { { "extension", false } }); + checkAttributes( line, getAttributes( element ), { { "comment", {} } }, {} ); + std::vector children = getChildElements( element ); + checkElements( line, children, { { "extension", false } } ); - for (auto child : children) + for ( auto child : children ) { - readExtension(child); + readExtension( child ); } } -void VulkanHppGenerator::readFeature(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readFeature( tinyxml2::XMLElement const * element ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, { { "api",{ "vulkan" } },{ "comment",{} },{ "name",{} },{ "number",{} } }, {}); - std::vector children = getChildElements(element); - checkElements(line, children, { { "require", false } }); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( + line, attributes, { { "api", { "vulkan" } }, { "comment", {} }, { "name", {} }, { "number", {} } }, {} ); + std::vector children = getChildElements( element ); + checkElements( line, children, { { "require", false } } ); std::string name, number, modifiedNumber; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "name") + if ( attribute.first == "name" ) { name = attribute.second; } - else if (attribute.first == "number") + else if ( attribute.first == "number" ) { - number = attribute.second; + number = attribute.second; modifiedNumber = number; - std::replace(modifiedNumber.begin(), modifiedNumber.end(), '.', '_'); + std::replace( modifiedNumber.begin(), modifiedNumber.end(), '.', '_' ); } } - assert(!name.empty() && !number.empty()); - 
check(name == "VK_VERSION_" + modifiedNumber, line, "unexpected formatting of name <" + name + ">"); - check(m_features.insert(std::make_pair(name, number)).second, line, "already specified feature <" + name + ">"); + assert( !name.empty() && !number.empty() ); + check( name == "VK_VERSION_" + modifiedNumber, line, "unexpected formatting of name <" + name + ">" ); + check( m_features.insert( std::make_pair( name, number ) ).second, line, "already specified feature <" + name + ">" ); - for (auto child : children) + for ( auto child : children ) { - readFeatureRequire(child); + readFeatureRequire( child ); } } -void VulkanHppGenerator::readFeatureRequire(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readFeatureRequire( tinyxml2::XMLElement const * element ) { int line = element->GetLineNum(); - checkAttributes(line, getAttributes(element), {}, { { "comment",{} } }); - std::vector children = getChildElements(element); - checkElements(line, children, {}, { "command", "comment", "enum", "type" }); + checkAttributes( line, getAttributes( element ), {}, { { "comment", {} } } ); + std::vector children = getChildElements( element ); + checkElements( line, children, {}, { "command", "comment", "enum", "type" } ); - for (auto child : children) + for ( auto child : children ) { std::string value = child->Value(); - if (value == "command") + if ( value == "command" ) { - readRequireCommand(child); + readRequireCommand( child ); } - else if (value == "comment") + else if ( value == "comment" ) { - readComment(child); + readComment( child ); } - else if (value == "enum") + else if ( value == "enum" ) { - readRequireEnum(child, ""); + readRequireEnum( child, "" ); } - else if (value == "type") + else if ( value == "type" ) { - readRequireType(child); + readRequireType( child ); } } } -void VulkanHppGenerator::readFuncpointer(tinyxml2::XMLElement const* element, std::map const& attributes) +void VulkanHppGenerator::readFuncpointer( tinyxml2::XMLElement const * element, + std::map const & attributes ) { int line = element->GetLineNum(); - checkAttributes(line, attributes, { { "category",{ "funcpointer" } } }, { { "requires",{} } }); - std::vector children = getChildElements(element); - checkElements(line, children, { { "name", true } }, { "type" }); + checkAttributes( line, attributes, { { "category", { "funcpointer" } } }, { { "requires", {} } } ); + std::vector children = getChildElements( element ); + checkElements( line, children, { { "name", true } }, { "type" } ); std::string requirements; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "requires") + if ( attribute.first == "requires" ) { requirements = attribute.second; } } - for (auto const& child : children) + for ( auto const & child : children ) { - std::string value = child->Value(); - int childLine = child->GetLineNum(); - if (value == "name") + std::string value = child->Value(); + int childLine = child->GetLineNum(); + if ( value == "name" ) { std::string name = child->GetText(); - check(!name.empty(), childLine, "funcpointer with empty name"); - check(m_funcPointers.insert(std::make_pair(name, FuncPointerData(requirements, line))).second, childLine, "funcpointer <" + name + "> already specified"); - check(m_types.insert(std::make_pair(name, TypeCategory::FuncPointer)).second, childLine, "funcpointer <" + name + "> already specified as a type"); + check( !name.empty(), childLine, "funcpointer with empty name" ); + check( m_funcPointers.insert( std::make_pair( 
name, FuncPointerData( requirements, line ) ) ).second,
+             childLine,
+             "funcpointer <" + name + "> already specified" );
+      check( m_types.insert( std::make_pair( name, TypeCategory::FuncPointer ) ).second,
+             childLine,
+             "funcpointer <" + name + "> already specified as a type" );
     }
-    else if (value == "type")
+    else if ( value == "type" )
     {
       std::string type = child->GetText();
-      check(!type.empty(), childLine, "funcpointer argument with empty type");
-      check((m_types.find(type) != m_types.end()) || (type == requirements), childLine, "funcpointer argument of unknown type <" + type + ">");
+      check( !type.empty(), childLine, "funcpointer argument with empty type" );
+      check( ( m_types.find( type ) != m_types.end() ) || ( type == requirements ),
+             childLine,
+             "funcpointer argument of unknown type <" + type + ">" );
     }
   }
 }
-void VulkanHppGenerator::readHandle(tinyxml2::XMLElement const* element, std::map<std::string, std::string> const& attributes)
+void VulkanHppGenerator::readHandle( tinyxml2::XMLElement const * element,
+                                     std::map<std::string, std::string> const & attributes )
 {
   int line = element->GetLineNum();
-  auto aliasIt = attributes.find("alias");
-  if (aliasIt != attributes.end())
+  auto aliasIt = attributes.find( "alias" );
+  if ( aliasIt != attributes.end() )
   {
-    checkAttributes(line, attributes, { { "alias",{} },{ "category",{ "handle" } },{ "name",{} } }, {});
-    checkElements(line, getChildElements(element), {});
+    checkAttributes( line, attributes, { { "alias", {} }, { "category", { "handle" } }, { "name", {} } }, {} );
+    checkElements( line, getChildElements( element ), {} );
-    auto handlesIt = m_handles.find(aliasIt->second);
-    check(handlesIt != m_handles.end(), line, "using unspecified alias <" + aliasIt->second + ">.");
-    check(handlesIt->second.alias.empty(), line, "handle <" + handlesIt->first + "> already has an alias <" + handlesIt->second.alias + ">");
-    handlesIt->second.alias = attributes.find("name")->second;
-    check(m_types.insert(std::make_pair(handlesIt->second.alias, TypeCategory::Handle)).second, line, "handle alias <" + handlesIt->second.alias + "> already specified as a type");
+    auto handlesIt = m_handles.find( aliasIt->second );
+    check( handlesIt != m_handles.end(), line, "using unspecified alias <" + aliasIt->second + ">." );
+    check( handlesIt->second.alias.empty(),
+           line,
+           "handle <" + handlesIt->first + "> already has an alias <" + handlesIt->second.alias + ">" );
+    handlesIt->second.alias = attributes.find( "name" )->second;
+    check( m_types.insert( std::make_pair( handlesIt->second.alias, TypeCategory::Handle ) ).second,
+           line,
+           "handle alias <" + handlesIt->second.alias + "> already specified as a type" );
   }
   else
   {
-    checkAttributes(line, attributes, { { "category",{ "handle" } } }, { { "parent",{} }});
+    checkAttributes( line, attributes, { { "category", { "handle" } } }, { { "parent", {} } } );
     std::string parent;
-    for (auto const& attribute : attributes)
+    for ( auto const & attribute : attributes )
     {
-      if (attribute.first == "parent")
+      if ( attribute.first == "parent" )
       {
         parent = attribute.second;
-        check(!parent.empty(), line, "handle with empty parent");
+        check( !parent.empty(), line, "handle with empty parent" );
       }
     }
     NameData nameData;
     TypeData typeData;
-    std::tie(nameData, typeData) = readNameAndType(element);
+    std::tie( nameData, typeData ) = readNameAndType( element );
-    check(beginsWith(nameData.name, "Vk"), line, "name <" + nameData.name + "> does not begin with <Vk>");
-    check(nameData.arraySizes.empty(), line, "name <" + nameData.name + "> with unsupported arraySizes");
-    check(nameData.bitCount.empty(), line, "name <" + nameData.name + "> with unsupported bitCount <" + nameData.bitCount + ">");
-    check((typeData.type == "VK_DEFINE_HANDLE") || (typeData.type == "VK_DEFINE_NON_DISPATCHABLE_HANDLE"), line, "handle with invalid type <" + typeData.type + ">");
-    check(typeData.prefix.empty(), line, "unexpected type prefix <" + typeData.prefix + ">");
-    check(typeData.postfix == "(", line, "unexpected type postfix <" + typeData.postfix + ">");
+    check( beginsWith( nameData.name, "Vk" ), line, "name <" + nameData.name + "> does not begin with <Vk>" );
+    check( nameData.arraySizes.empty(), line, "name <" + nameData.name + "> with unsupported arraySizes" );
+    check( nameData.bitCount.empty(),
+           line,
+           "name <" + nameData.name + "> with unsupported bitCount <" + nameData.bitCount + ">" );
+    check( ( typeData.type == "VK_DEFINE_HANDLE" ) || ( typeData.type == "VK_DEFINE_NON_DISPATCHABLE_HANDLE" ),
+           line,
+           "handle with invalid type <" + typeData.type + ">" );
+    check( typeData.prefix.empty(), line, "unexpected type prefix <" + typeData.prefix + ">" );
+    check( typeData.postfix == "(", line, "unexpected type postfix <" + typeData.postfix + ">" );
-    check(m_handles.insert(std::make_pair(nameData.name, HandleData(tokenize(parent, ","), line))).second, line, "handle <" + nameData.name + "> already specified");
-    check(m_types.insert(std::make_pair(nameData.name, TypeCategory::Handle)).second, line, "handle <" + nameData.name + "> already specified as a type");
+    check( m_handles.insert( std::make_pair( nameData.name, HandleData( tokenize( parent, "," ), line ) ) ).second,
+           line,
+           "handle <" + nameData.name + "> already specified" );
+    check( m_types.insert( std::make_pair( nameData.name, TypeCategory::Handle ) ).second,
+           line,
+           "handle <" + nameData.name + "> already specified as a type" );
   }
 }
-std::pair<VulkanHppGenerator::NameData, VulkanHppGenerator::TypeData> VulkanHppGenerator::readNameAndType(tinyxml2::XMLElement const* element)
+std::pair<VulkanHppGenerator::NameData, VulkanHppGenerator::TypeData>
+  VulkanHppGenerator::readNameAndType( tinyxml2::XMLElement const * element )
 {
-  int line = element->GetLineNum();
-  std::vector<tinyxml2::XMLElement const*> children = getChildElements(element);
-  checkElements(line, children, { { "name", true }, { "type", true } });
+  int                                       line     = element->GetLineNum();
+  std::vector<tinyxml2::XMLElement const *> children =
getChildElements( element ); + checkElements( line, children, { { "name", true }, { "type", true } } ); NameData nameData; TypeData typeData; - for (auto child : children) + for ( auto child : children ) { line = child->GetLineNum(); - checkAttributes(line, getAttributes(child), {}, {}); - checkElements(line, getChildElements(child), {}); + checkAttributes( line, getAttributes( child ), {}, {} ); + checkElements( line, getChildElements( child ), {} ); std::string value = child->Value(); - if (value == "name") + if ( value == "name" ) { - nameData.name = child->GetText(); - std::tie(nameData.arraySizes, nameData.bitCount) = readModifiers(child->NextSibling()); + nameData.name = child->GetText(); + std::tie( nameData.arraySizes, nameData.bitCount ) = readModifiers( child->NextSibling() ); } - else if (value == "type") + else if ( value == "type" ) { - typeData.prefix = readTypePrefix(child->PreviousSibling()); - typeData.type = child->GetText(); - typeData.postfix = readTypePostfix(child->NextSibling()); + typeData.prefix = readTypePrefix( child->PreviousSibling() ); + typeData.type = child->GetText(); + typeData.postfix = readTypePostfix( child->NextSibling() ); } } - return std::make_pair(nameData, typeData); + return std::make_pair( nameData, typeData ); } -void VulkanHppGenerator::readPlatform(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readPlatform( tinyxml2::XMLElement const * element ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, { { "comment",{} },{ "name",{} },{ "protect",{} } }, {}); - checkElements(line, getChildElements(element), {}); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, attributes, { { "comment", {} }, { "name", {} }, { "protect", {} } }, {} ); + checkElements( line, getChildElements( element ), {} ); std::string name, protect; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "name") + if ( attribute.first == "name" ) { name = attribute.second; - check(!name.empty(), line, "attribute is empty"); + check( !name.empty(), line, "attribute is empty" ); } - else if (attribute.first == "protect") + else if ( attribute.first == "protect" ) { protect = attribute.second; - check(!protect.empty(), line, "attribute is empty"); + check( !protect.empty(), line, "attribute is empty" ); } } - assert(!name.empty() && !protect.empty()); - check(m_platforms.find(name) == m_platforms.end(), line, "platform name <" + name + "> already specified"); - check(std::find_if(m_platforms.begin(), m_platforms.end(), [&protect](std::pair const& p) { return p.second == protect; }) == m_platforms.end(), line, "platform protect <" + protect + "> already specified"); + assert( !name.empty() && !protect.empty() ); + check( m_platforms.find( name ) == m_platforms.end(), line, "platform name <" + name + "> already specified" ); + check( std::find_if( m_platforms.begin(), + m_platforms.end(), + [&protect]( std::pair const & p ) { return p.second == protect; } ) == + m_platforms.end(), + line, + "platform protect <" + protect + "> already specified" ); m_platforms[name] = protect; } -void VulkanHppGenerator::readPlatforms(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readPlatforms( tinyxml2::XMLElement const * element ) { int line = element->GetLineNum(); - checkAttributes(line, getAttributes(element), { { "comment",{} } }, {}); - std::vector children = 
getChildElements(element); - checkElements(line, children, { { "platform", false } }); + checkAttributes( line, getAttributes( element ), { { "comment", {} } }, {} ); + std::vector children = getChildElements( element ); + checkElements( line, children, { { "platform", false } } ); - for (auto child : children) + for ( auto child : children ) { - readPlatform(child); + readPlatform( child ); } } -void VulkanHppGenerator::readRegistry(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readRegistry( tinyxml2::XMLElement const * element ) { int line = element->GetLineNum(); - checkAttributes(line, getAttributes(element), {}, {}); + checkAttributes( line, getAttributes( element ), {}, {} ); - std::vector children = getChildElements(element); - checkElements(line, children, { { "commands", true }, { "comment", false }, { "enums", false }, { "extensions", true }, { "feature", false }, { "platforms", true }, { "tags", true }, { "types", true } }); - for (auto child : children) + std::vector children = getChildElements( element ); + checkElements( line, + children, + { { "commands", true }, + { "comment", false }, + { "enums", false }, + { "extensions", true }, + { "feature", false }, + { "platforms", true }, + { "tags", true }, + { "types", true } } ); + for ( auto child : children ) { const std::string value = child->Value(); - if (value == "commands") + if ( value == "commands" ) { - readCommands(child); + readCommands( child ); } - else if (value == "comment") + else if ( value == "comment" ) { - std::string comment = readComment(child); - if (comment.find("\nCopyright") == 0) + std::string comment = readComment( child ); + if ( comment.find( "\nCopyright" ) == 0 ) { - setVulkanLicenseHeader(child->GetLineNum(), comment); + setVulkanLicenseHeader( child->GetLineNum(), comment ); } } - else if (value == "enums") + else if ( value == "enums" ) { - readEnums(child); + readEnums( child ); } - else if (value == "extensions") + else if ( value == "extensions" ) { - readExtensions(child); + readExtensions( child ); } - else if (value == "feature") + else if ( value == "feature" ) { - readFeature(child); + readFeature( child ); } - else if (value == "platforms") + else if ( value == "platforms" ) { - readPlatforms(child); + readPlatforms( child ); } - else if (value == "tags") + else if ( value == "tags" ) { - readTags(child); + readTags( child ); } - else if (value == "types") + else if ( value == "types" ) { - readTypes(child); + readTypes( child ); } } } -void VulkanHppGenerator::readRequireCommand(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readRequireCommand( tinyxml2::XMLElement const * element ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, {}, { { "name",{} } }); - std::string command = attributes.find("name")->second; - check(m_commandToHandle.find(command) != m_commandToHandle.end(), line, "feature requires unknown command <" + command + ">"); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, attributes, {}, { { "name", {} } } ); + std::string command = attributes.find( "name" )->second; + check( m_commandToHandle.find( command ) != m_commandToHandle.end(), + line, + "feature requires unknown command <" + command + ">" ); } -void VulkanHppGenerator::readRequireEnum(tinyxml2::XMLElement const* element, std::string const& tag) +void VulkanHppGenerator::readRequireEnum( tinyxml2::XMLElement const * element, std::string const & 
tag ) { - std::map attributes = getAttributes(element); - if (attributes.find("alias") != attributes.end()) + std::map attributes = getAttributes( element ); + if ( attributes.find( "alias" ) != attributes.end() ) { - readRequireEnumAlias(element, attributes, tag); + readRequireEnumAlias( element, attributes, tag ); } else { - readRequireEnum(element, attributes, tag); + readRequireEnum( element, attributes, tag ); } } -void VulkanHppGenerator::readRequireEnum(tinyxml2::XMLElement const* element, std::map const& attributes, std::string const& tag) +void VulkanHppGenerator::readRequireEnum( tinyxml2::XMLElement const * element, + std::map const & attributes, + std::string const & tag ) { int line = element->GetLineNum(); - checkAttributes(line, attributes, - { - { "name",{} } - }, - { - { "bitpos",{} }, - { "comment",{} }, - { "extends",{} }, - { "dir",{ "-" } }, - { "extnumber",{} }, - { "offset",{} }, - { "value",{} } - }); - checkElements(line, getChildElements(element), {}); + checkAttributes( line, + attributes, + { { "name", {} } }, + { { "bitpos", {} }, + { "comment", {} }, + { "extends", {} }, + { "dir", { "-" } }, + { "extnumber", {} }, + { "offset", {} }, + { "value", {} } } ); + checkElements( line, getChildElements( element ), {} ); std::string bitpos, name, extends, extnumber, offset, value; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "bitpos") + if ( attribute.first == "bitpos" ) { bitpos = attribute.second; } - else if (attribute.first == "extends") + else if ( attribute.first == "extends" ) { extends = attribute.second; } - else if (attribute.first == "name") + else if ( attribute.first == "name" ) { name = attribute.second; } - else if (attribute.first == "offset") + else if ( attribute.first == "offset" ) { offset = attribute.second; } - else if (attribute.first == "value") + else if ( attribute.first == "value" ) { value = attribute.second; } } - if (!extends.empty()) + if ( !extends.empty() ) { - auto enumIt = m_enums.find(extends); - check(enumIt != m_enums.end(), line, "feature extends unknown enum <" + extends + ">"); + auto enumIt = m_enums.find( extends ); + check( enumIt != m_enums.end(), line, "feature extends unknown enum <" + extends + ">" ); - std::string prefix = getEnumPrefix(element->GetLineNum(), enumIt->first, enumIt->second.isBitmask); - std::string postfix = getEnumPostfix(enumIt->first, m_tags, prefix); + std::string prefix = getEnumPrefix( element->GetLineNum(), enumIt->first, enumIt->second.isBitmask ); + std::string postfix = getEnumPostfix( enumIt->first, m_tags, prefix ); // add this enum name to the list of values - check(bitpos.empty() + offset.empty() + value.empty() == 2, line, "exactly one out of bitpos = <" + bitpos + ">, offset = <" + offset + ">, and value = <" + value + "> are supposed to be empty"); - enumIt->second.addEnumValue(element->GetLineNum(), name, enumIt->second.isBitmask, !bitpos.empty(), prefix, postfix, tag); + check( bitpos.empty() + offset.empty() + value.empty() == 2, + line, + "exactly one out of bitpos = <" + bitpos + ">, offset = <" + offset + ">, and value = <" + value + + "> are supposed to be empty" ); + enumIt->second.addEnumValue( + element->GetLineNum(), name, enumIt->second.isBitmask, !bitpos.empty(), prefix, postfix, tag ); } - else if (value.empty()) + else if ( value.empty() ) { - check(m_constants.find(name) != m_constants.end(), line, "unknown required enum <" + name + ">"); + check( m_constants.find( name ) != m_constants.end(), line, 
"unknown required enum <" + name + ">" ); } } -void VulkanHppGenerator::readRequireEnumAlias(tinyxml2::XMLElement const* element, std::map const& attributes, std::string const& tag) +void VulkanHppGenerator::readRequireEnumAlias( tinyxml2::XMLElement const * element, + std::map const & attributes, + std::string const & tag ) { int line = element->GetLineNum(); - checkAttributes(line, attributes, { { "alias",{} }, { "extends",{} }, { "name",{} } }, { { "comment",{} } }); - checkElements(line, getChildElements(element), {}); + checkAttributes( line, attributes, { { "alias", {} }, { "extends", {} }, { "name", {} } }, { { "comment", {} } } ); + checkElements( line, getChildElements( element ), {} ); std::string alias, bitpos, name, extends, extnumber, offset, value; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "alias") + if ( attribute.first == "alias" ) { alias = attribute.second; } - else if (attribute.first == "extends") + else if ( attribute.first == "extends" ) { extends = attribute.second; } - else if (attribute.first == "name") + else if ( attribute.first == "name" ) { name = attribute.second; } } - auto enumIt = m_enums.find(extends); - check(enumIt != m_enums.end(), line, "feature extends unknown enum <" + extends + ">"); + auto enumIt = m_enums.find( extends ); + check( enumIt != m_enums.end(), line, "feature extends unknown enum <" + extends + ">" ); - std::string prefix = getEnumPrefix(element->GetLineNum(), enumIt->first, enumIt->second.isBitmask); - std::string postfix = getEnumPostfix(enumIt->first, m_tags, prefix); + std::string prefix = getEnumPrefix( element->GetLineNum(), enumIt->first, enumIt->second.isBitmask ); + std::string postfix = getEnumPostfix( enumIt->first, m_tags, prefix ); // add this enum name to the list of aliases - std::string valueName = createEnumValueName(name, prefix, postfix, enumIt->second.isBitmask, tag); - if (!enumIt->second.alias.empty()) + std::string valueName = createEnumValueName( name, prefix, postfix, enumIt->second.isBitmask, tag ); + if ( !enumIt->second.alias.empty() ) { - prefix = getEnumPrefix(element->GetLineNum(), enumIt->second.alias, enumIt->second.isBitmask); - postfix = getEnumPostfix(enumIt->second.alias, m_tags, prefix); - if (endsWith(name, postfix)) + prefix = getEnumPrefix( element->GetLineNum(), enumIt->second.alias, enumIt->second.isBitmask ); + postfix = getEnumPostfix( enumIt->second.alias, m_tags, prefix ); + if ( endsWith( name, postfix ) ) { - valueName = createEnumValueName(name, prefix, postfix, enumIt->second.isBitmask, tag); + valueName = createEnumValueName( name, prefix, postfix, enumIt->second.isBitmask, tag ); } } - check(std::find_if(enumIt->second.aliases.begin(), enumIt->second.aliases.end(), [&valueName](std::pair const& aliasPair) { return valueName == aliasPair.second; }) == enumIt->second.aliases.end(), - line, "alias <" + valueName + "> already specified"); - if (std::find_if(enumIt->second.values.begin(), enumIt->second.values.end(), [&valueName](EnumValueData const& evd) { return evd.vkValue == valueName; }) == enumIt->second.values.end()) + check( std::find_if( enumIt->second.aliases.begin(), + enumIt->second.aliases.end(), + [&valueName]( std::pair const & aliasPair ) { + return valueName == aliasPair.second; + } ) == enumIt->second.aliases.end(), + line, + "alias <" + valueName + "> already specified" ); + if ( std::find_if( + enumIt->second.values.begin(), enumIt->second.values.end(), [&valueName]( EnumValueData const & evd ) { 
+ return evd.vkValue == valueName; + } ) == enumIt->second.values.end() ) { - enumIt->second.aliases.push_back(std::make_pair(name, valueName)); + enumIt->second.aliases.push_back( std::make_pair( name, valueName ) ); } } -void VulkanHppGenerator::readRequires(tinyxml2::XMLElement const* element, std::map const& attributes) +void VulkanHppGenerator::readRequires( tinyxml2::XMLElement const * element, + std::map const & attributes ) { int line = element->GetLineNum(); - checkAttributes(line, attributes, { {"name", {}}, { "requires", {}} }, {}); - checkElements(line, getChildElements(element), {}); + checkAttributes( line, attributes, { { "name", {} }, { "requires", {} } }, {} ); + checkElements( line, getChildElements( element ), {} ); - for (auto attribute : attributes) + for ( auto attribute : attributes ) { - if (attribute.first == "name") + if ( attribute.first == "name" ) { - check(m_types.insert(std::make_pair(attribute.second, TypeCategory::Requires)).second, line, "type named <" + attribute.second + "> already specified"); + check( m_types.insert( std::make_pair( attribute.second, TypeCategory::Requires ) ).second, + line, + "type named <" + attribute.second + "> already specified" ); } else { - assert(attribute.first == "requires"); - check(m_includes.find(attribute.second) != m_includes.end(), line, "type requires unknown include <" + attribute.second + ">"); + assert( attribute.first == "requires" ); + check( m_includes.find( attribute.second ) != m_includes.end(), + line, + "type requires unknown include <" + attribute.second + ">" ); } } } -void VulkanHppGenerator::readRequireType(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readRequireType( tinyxml2::XMLElement const * element ) { int line = element->GetLineNum(); - checkAttributes(line, getAttributes(element), { { "name", {} } }, {}); - checkElements(line, getChildElements(element), {}); + checkAttributes( line, getAttributes( element ), { { "name", {} } }, {} ); + checkElements( line, getChildElements( element ), {} ); } -void VulkanHppGenerator::readStruct(tinyxml2::XMLElement const* element, bool isUnion, std::map const& attributes) +void VulkanHppGenerator::readStruct( tinyxml2::XMLElement const * element, + bool isUnion, + std::map const & attributes ) { int line = element->GetLineNum(); - if (attributes.find("alias") != attributes.end()) + if ( attributes.find( "alias" ) != attributes.end() ) { - readStructAlias(element, attributes); + readStructAlias( element, attributes ); } else { - checkAttributes(line, attributes, - { - { "category",{ isUnion ? "union" : "struct" } }, - { "name",{} } - }, - { - { "comment",{} }, - { "returnedonly",{ "true" } }, - { "structextends",{} } - }); - std::vector children = getChildElements(element); - checkElements(line, children, {}, { "member", "comment" }); + checkAttributes( line, + attributes, + { { "category", { isUnion ? 
"union" : "struct" } }, { "name", {} } }, + { { "comment", {} }, { "returnedonly", { "true" } }, { "structextends", {} } } ); + std::vector children = getChildElements( element ); + checkElements( line, children, {}, { "member", "comment" } ); - std::string category, name; - std::vector structExtends; - bool returnedOnly = false; - for (auto const& attribute : attributes) + std::string category, name; + std::vector structExtends; + bool returnedOnly = false; + for ( auto const & attribute : attributes ) { - if (attribute.first == "category") + if ( attribute.first == "category" ) { category = attribute.second; } - else if (attribute.first == "name") + else if ( attribute.first == "name" ) { name = attribute.second; } - else if (attribute.first == "returnedonly") + else if ( attribute.first == "returnedonly" ) { - check(attribute.second == "true", line, "unknown value for attribute returnedonly: <" + attribute.second + ">"); + check( + attribute.second == "true", line, "unknown value for attribute returnedonly: <" + attribute.second + ">" ); returnedOnly = true; } - else if (attribute.first == "structextends") + else if ( attribute.first == "structextends" ) { - structExtends = tokenize(attribute.second, ","); + structExtends = tokenize( attribute.second, "," ); } } - assert(!name.empty()); + assert( !name.empty() ); - check(m_structures.find(name) == m_structures.end(), line, "struct <" + name + "> already specfied"); - std::map::iterator it = m_structures.insert(std::make_pair(name, StructureData(structExtends, line))).first; + check( m_structures.find( name ) == m_structures.end(), line, "struct <" + name + "> already specfied" ); + std::map::iterator it = + m_structures.insert( std::make_pair( name, StructureData( structExtends, line ) ) ).first; it->second.returnedOnly = returnedOnly; - it->second.isUnion = isUnion; + it->second.isUnion = isUnion; - for (auto child : children) + for ( auto child : children ) { std::string value = child->Value(); - if (value == "comment") + if ( value == "comment" ) { - readComment(child); + readComment( child ); } - else if (value == "member") + else if ( value == "member" ) { - readStructMember(child, it->second.members); + readStructMember( child, it->second.members ); } } - it->second.subStruct = determineSubStruct(*it); + it->second.subStruct = determineSubStruct( *it ); - m_extendedStructs.insert(structExtends.begin(), structExtends.end()); - check(m_types.insert(std::make_pair(name, (category == "struct") ? TypeCategory::Struct : TypeCategory::Union)).second, line, "struct <" + name + "> already specified as a type"); // log type and alias in m_types + m_extendedStructs.insert( structExtends.begin(), structExtends.end() ); + check( + m_types.insert( std::make_pair( name, ( category == "struct" ) ? 
TypeCategory::Struct : TypeCategory::Union ) ) + .second, + line, + "struct <" + name + "> already specified as a type" ); // log type and alias in m_types } } -void VulkanHppGenerator::readStructAlias(tinyxml2::XMLElement const* element, std::map const& attributes) +void VulkanHppGenerator::readStructAlias( tinyxml2::XMLElement const * element, + std::map const & attributes ) { int line = element->GetLineNum(); - checkAttributes(line, attributes, { { "alias",{} },{ "category",{ "struct" } },{ "name",{} } }, {}); - checkElements(line, getChildElements(element), {}, {}); + checkAttributes( line, attributes, { { "alias", {} }, { "category", { "struct" } }, { "name", {} } }, {} ); + checkElements( line, getChildElements( element ), {}, {} ); std::string alias, name; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "alias") + if ( attribute.first == "alias" ) { alias = attribute.second; } - else if (attribute.first == "name") + else if ( attribute.first == "name" ) { name = attribute.second; } } - auto structIt = m_structures.find(alias); - check(structIt != m_structures.end(), line, "missing alias <" + alias + ">."); - check(structIt->second.aliases.insert(name).second, line, "struct <" + alias + "> already uses alias <" + name + ">"); - check(m_structureAliases.insert(std::make_pair(name, alias)).second, line, "structure alias <" + name + "> already used"); - check(m_types.insert(std::make_pair(name, TypeCategory::Struct)).second, line, "struct <" + name + "> already specified as a type"); + auto structIt = m_structures.find( alias ); + check( structIt != m_structures.end(), line, "missing alias <" + alias + ">." ); + check( + structIt->second.aliases.insert( name ).second, line, "struct <" + alias + "> already uses alias <" + name + ">" ); + check( m_structureAliases.insert( std::make_pair( name, alias ) ).second, + line, + "structure alias <" + name + "> already used" ); + check( m_types.insert( std::make_pair( name, TypeCategory::Struct ) ).second, + line, + "struct <" + name + "> already specified as a type" ); } -void VulkanHppGenerator::readStructMember(tinyxml2::XMLElement const* element, std::vector & members) +void VulkanHppGenerator::readStructMember( tinyxml2::XMLElement const * element, std::vector & members ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, {}, - { - { "altlen",{} }, - { "externsync",{ "true" } }, - { "len",{} }, - { "noautovalidity",{ "true" } }, - { "optional",{ "false", "true" } }, - { "values",{} } - }); - std::vector children = getChildElements(element); - checkElements(line, children, { { "name", true }, { "type", true } }, { "comment", "enum" }); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, + attributes, + {}, + { { "altlen", {} }, + { "externsync", { "true" } }, + { "len", {} }, + { "noautovalidity", { "true" } }, + { "optional", { "false", "true" } }, + { "values", {} } } ); + std::vector children = getChildElements( element ); + checkElements( line, children, { { "name", true }, { "type", true } }, { "comment", "enum" } ); - members.push_back(MemberData(line)); + members.push_back( MemberData( line ) ); MemberData & memberData = members.back(); - auto valuesIt = attributes.find("values"); - if (valuesIt != attributes.end()) + auto valuesIt = attributes.find( "values" ); + if ( valuesIt != attributes.end() ) { memberData.values = valuesIt->second; } - 
for (auto child : children) + for ( auto child : children ) { std::string value = child->Value(); - if (value == "enum") + if ( value == "enum" ) { - readStructMemberEnum(child, memberData); + readStructMemberEnum( child, memberData ); } - else if (value == "name") + else if ( value == "name" ) { - readStructMemberName(child, memberData, members); + readStructMemberName( child, memberData, members ); } - else if (value == "type") + else if ( value == "type" ) { - readStructMemberType(child, memberData); + readStructMemberType( child, memberData ); } } } -void VulkanHppGenerator::readStructMemberEnum(tinyxml2::XMLElement const* element, MemberData & memberData) +void VulkanHppGenerator::readStructMemberEnum( tinyxml2::XMLElement const * element, MemberData & memberData ) { int line = element->GetLineNum(); - checkAttributes(line, getAttributes(element), {}, {}); - checkElements(line, getChildElements(element), {}, {}); + checkAttributes( line, getAttributes( element ), {}, {} ); + checkElements( line, getChildElements( element ), {}, {} ); std::string enumString = element->GetText(); - check(element->PreviousSibling() && (strcmp(element->PreviousSibling()->Value(), "[") == 0) && element->NextSibling() && (strcmp(element->NextSibling()->Value(), "]") == 0), - line, std::string("structure member array specifiation is ill-formatted: <") + enumString + ">"); + check( element->PreviousSibling() && ( strcmp( element->PreviousSibling()->Value(), "[" ) == 0 ) && + element->NextSibling() && ( strcmp( element->NextSibling()->Value(), "]" ) == 0 ), + line, + std::string( "structure member array specifiation is ill-formatted: <" ) + enumString + ">" ); - memberData.arraySizes.push_back(enumString); - check(memberData.usedConstant.empty(), line, "struct already holds a constant <" + memberData.usedConstant + ">"); + memberData.arraySizes.push_back( enumString ); + check( memberData.usedConstant.empty(), line, "struct already holds a constant <" + memberData.usedConstant + ">" ); memberData.usedConstant = enumString; } -void VulkanHppGenerator::readStructMemberName(tinyxml2::XMLElement const* element, MemberData & memberData, std::vector const& members) +void VulkanHppGenerator::readStructMemberName( tinyxml2::XMLElement const * element, + MemberData & memberData, + std::vector const & members ) { int line = element->GetLineNum(); - checkAttributes(line, getAttributes(element), {}, {}); - checkElements(line, getChildElements(element), {}, {}); + checkAttributes( line, getAttributes( element ), {}, {} ); + checkElements( line, getChildElements( element ), {}, {} ); std::string name = element->GetText(); - check(std::find_if(members.begin(), members.end(), [&name](MemberData const& md) { return md.name == name; }) == members.end(), line, "structure member name <" + name + "> already used"); + check( std::find_if( members.begin(), members.end(), [&name]( MemberData const & md ) { return md.name == name; } ) == + members.end(), + line, + "structure member name <" + name + "> already used" ); - memberData.name = name; - std::tie(memberData.arraySizes, memberData.bitCount) = readModifiers(element->NextSibling()); + memberData.name = name; + std::tie( memberData.arraySizes, memberData.bitCount ) = readModifiers( element->NextSibling() ); } -void VulkanHppGenerator::readStructMemberType(tinyxml2::XMLElement const* element, MemberData & memberData) +void VulkanHppGenerator::readStructMemberType( tinyxml2::XMLElement const * element, MemberData & memberData ) { int line = element->GetLineNum(); - 
checkAttributes(line, getAttributes(element), {}, {}); - checkElements(line, getChildElements(element), {}, {}); + checkAttributes( line, getAttributes( element ), {}, {} ); + checkElements( line, getChildElements( element ), {}, {} ); - memberData.type.prefix = readTypePrefix(element->PreviousSibling()); - memberData.type.type = element->GetText(); - memberData.type.postfix = readTypePostfix(element->NextSibling()); + memberData.type.prefix = readTypePrefix( element->PreviousSibling() ); + memberData.type.type = element->GetText(); + memberData.type.postfix = readTypePostfix( element->NextSibling() ); } -void VulkanHppGenerator::readTag(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readTag( tinyxml2::XMLElement const * element ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); - checkAttributes(line, attributes, { { "author",{} },{ "contact",{} },{ "name",{} } }, {}); - checkElements(line, getChildElements(element), {}); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); + checkAttributes( line, attributes, { { "author", {} }, { "contact", {} }, { "name", {} } }, {} ); + checkElements( line, getChildElements( element ), {} ); - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "name") + if ( attribute.first == "name" ) { - check(m_tags.find(attribute.second) == m_tags.end(), line, "tag named <" + attribute.second + "> has already been specified"); - m_tags.insert(attribute.second); + check( m_tags.find( attribute.second ) == m_tags.end(), + line, + "tag named <" + attribute.second + "> has already been specified" ); + m_tags.insert( attribute.second ); } else { - check((attribute.first == "author") || (attribute.first == "contact"), line, "unknown attribute <" + attribute.first + ">"); + check( ( attribute.first == "author" ) || ( attribute.first == "contact" ), + line, + "unknown attribute <" + attribute.first + ">" ); } } } -void VulkanHppGenerator::readTags(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readTags( tinyxml2::XMLElement const * element ) { int line = element->GetLineNum(); - checkAttributes(line, getAttributes(element), { { "comment",{} } }, {}); - std::vector children = getChildElements(element); - checkElements(line, children, { { "tag", false } }); + checkAttributes( line, getAttributes( element ), { { "comment", {} } }, {} ); + std::vector children = getChildElements( element ); + checkElements( line, children, { { "tag", false } } ); - for (auto child : children) + for ( auto child : children ) { - readTag(child); + readTag( child ); } } -void VulkanHppGenerator::readType(tinyxml2::XMLElement const* element) +void VulkanHppGenerator::readType( tinyxml2::XMLElement const * element ) { - int line = element->GetLineNum(); - std::map attributes = getAttributes(element); + int line = element->GetLineNum(); + std::map attributes = getAttributes( element ); - auto categoryIt = attributes.find("category"); - if (categoryIt != attributes.end()) + auto categoryIt = attributes.find( "category" ); + if ( categoryIt != attributes.end() ) { - if (categoryIt->second == "basetype") + if ( categoryIt->second == "basetype" ) { - readBaseType(element, attributes); + readBaseType( element, attributes ); } - else if (categoryIt->second == "bitmask") + else if ( categoryIt->second == "bitmask" ) { - readBitmask(element, attributes); + readBitmask( element, attributes ); } - else if (categoryIt->second == "define") + else 
if ( categoryIt->second == "define" ) { - readDefine(element, attributes); + readDefine( element, attributes ); } - else if (categoryIt->second == "enum") + else if ( categoryIt->second == "enum" ) { - readTypeEnum(element, attributes); + readTypeEnum( element, attributes ); } - else if (categoryIt->second == "funcpointer") + else if ( categoryIt->second == "funcpointer" ) { - readFuncpointer(element, attributes); + readFuncpointer( element, attributes ); } - else if (categoryIt->second == "handle") + else if ( categoryIt->second == "handle" ) { - readHandle(element, attributes); + readHandle( element, attributes ); } - else if (categoryIt->second == "include") + else if ( categoryIt->second == "include" ) { - readTypeInclude(element, attributes); + readTypeInclude( element, attributes ); } - else if (categoryIt->second == "struct") + else if ( categoryIt->second == "struct" ) { - readStruct(element, false, attributes); + readStruct( element, false, attributes ); } else { - check(categoryIt->second == "union", element->GetLineNum(), "unknown type category <" + categoryIt->second + ">"); - readStruct(element, true, attributes); + check( + categoryIt->second == "union", element->GetLineNum(), "unknown type category <" + categoryIt->second + ">" ); + readStruct( element, true, attributes ); } } else { - auto requiresIt = attributes.find("requires"); - if (requiresIt != attributes.end()) + auto requiresIt = attributes.find( "requires" ); + if ( requiresIt != attributes.end() ) { - readRequires(element, attributes); + readRequires( element, attributes ); } else { - check((attributes.size() == 1) && (attributes.begin()->first == "name") && (attributes.begin()->second == "int"), line, "unknown type"); - check(m_types.insert(std::make_pair(attributes.begin()->second, TypeCategory::Unknown)).second, line, "type <" + attributes.begin()->second + "> already specified"); + check( ( attributes.size() == 1 ) && ( attributes.begin()->first == "name" ) && + ( attributes.begin()->second == "int" ), + line, + "unknown type" ); + check( m_types.insert( std::make_pair( attributes.begin()->second, TypeCategory::Unknown ) ).second, + line, + "type <" + attributes.begin()->second + "> already specified" ); } } } -void VulkanHppGenerator::readTypeEnum(tinyxml2::XMLElement const* element, std::map const& attributes) +void VulkanHppGenerator::readTypeEnum( tinyxml2::XMLElement const * element, + std::map const & attributes ) { int line = element->GetLineNum(); - checkAttributes(line, attributes, { { "category",{ "enum" } }, { "name",{} } }, { { "alias",{} }}); - checkElements(line, getChildElements(element), {}); + checkAttributes( line, attributes, { { "category", { "enum" } }, { "name", {} } }, { { "alias", {} } } ); + checkElements( line, getChildElements( element ), {} ); std::string alias, name; - for (auto const& attribute : attributes) + for ( auto const & attribute : attributes ) { - if (attribute.first == "alias") + if ( attribute.first == "alias" ) { alias = attribute.second; - check(!alias.empty(), line, "enum with empty alias"); + check( !alias.empty(), line, "enum with empty alias" ); } - else if (attribute.first == "name") + else if ( attribute.first == "name" ) { name = attribute.second; - check(!name.empty(), line, "enum with empty name"); - check(m_enums.find(name) == m_enums.end(), line, "enum <" + name + "> already specified"); + check( !name.empty(), line, "enum with empty name" ); + check( m_enums.find( name ) == m_enums.end(), line, "enum <" + name + "> already specified" ); } } - 
assert(!name.empty());
+  assert( !name.empty() );
-  if (alias.empty())
+  if ( alias.empty() )
   {
-    check(m_enums.insert(std::make_pair(name, EnumData())).second, line, "enum <" + name + "> already specified");
+    check( m_enums.insert( std::make_pair( name, EnumData() ) ).second, line, "enum <" + name + "> already specified" );
   }
   else
   {
-    auto enumIt = m_enums.find(alias);
-    check(enumIt != m_enums.end(), line, "enum with unknown alias <" + alias + ">");
-    check(enumIt->second.alias.empty(), line, "enum <" + enumIt->first + "> already has an alias <" + enumIt->second.alias + ">");
+    auto enumIt = m_enums.find( alias );
+    check( enumIt != m_enums.end(), line, "enum with unknown alias <" + alias + ">" );
+    check( enumIt->second.alias.empty(),
+           line,
+           "enum <" + enumIt->first + "> already has an alias <" + enumIt->second.alias + ">" );
     enumIt->second.alias = name;
   }
-  check(m_types.insert(std::make_pair(name, TypeCategory::Enum)).second, line, "enum <" + name + "> already specified as a type");
+  check( m_types.insert( std::make_pair( name, TypeCategory::Enum ) ).second,
+         line,
+         "enum <" + name + "> already specified as a type" );
 }
-void VulkanHppGenerator::readTypeInclude(tinyxml2::XMLElement const* element, std::map<std::string, std::string> const& attributes)
+void VulkanHppGenerator::readTypeInclude( tinyxml2::XMLElement const * element,
+                                          std::map<std::string, std::string> const & attributes )
 {
   int line = element->GetLineNum();
-  checkAttributes(line, attributes, { { "category", { "include" } }, { "name",{} } }, {});
-  checkElements(line, getChildElements(element), {});
+  checkAttributes( line, attributes, { { "category", { "include" } }, { "name", {} } }, {} );
+  checkElements( line, getChildElements( element ), {} );
-  std::string name = attributes.find("name")->second;
-  check(m_includes.insert(name).second, element->GetLineNum(), "include named <" + name + "> already specified");
+  std::string name = attributes.find( "name" )->second;
+  check( m_includes.insert( name ).second, element->GetLineNum(), "include named <" + name + "> already specified" );
 }
-void VulkanHppGenerator::readTypes(tinyxml2::XMLElement const* element)
+void VulkanHppGenerator::readTypes( tinyxml2::XMLElement const * element )
 {
   int line = element->GetLineNum();
-  checkAttributes(line, getAttributes(element), { { "comment",{} } }, {});
-  std::vector<tinyxml2::XMLElement const*> children = getChildElements(element);
-  checkElements(line, children, { { "comment", false }, { "type", false } });
+  checkAttributes( line, getAttributes( element ), { { "comment", {} } }, {} );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, { { "comment", false }, { "type", false } } );
-  for (auto child : children)
+  for ( auto child : children )
   {
     std::string value = child->Value();
-    if (value == "comment")
+    if ( value == "comment" )
     {
-      readComment(child);
+      readComment( child );
     }
     else
    {
-      assert(value == "type");
-      readType(child);
+      assert( value == "type" );
+      readType( child );
    }
   }
 }
-void VulkanHppGenerator::registerDeleter(std::string const& name, std::pair<std::string, CommandData> const& commandData)
+void VulkanHppGenerator::registerDeleter( std::string const & name,
+                                          std::pair<std::string, CommandData> const & commandData )
 {
-  if ((commandData.first.substr(2, 7) == "Destroy") || (commandData.first.substr(2, 4) == "Free"))
+  if ( ( commandData.first.substr( 2, 7 ) == "Destroy" ) || ( commandData.first.substr( 2, 4 ) == "Free" ) )
   {
     std::string key;
-    size_t valueIndex;
-    switch (commandData.second.params.size())
+    size_t      valueIndex;
+    switch ( commandData.second.params.size() )
     {
-    case 2:
-    case 3:
-
assert(commandData.second.params.back().type.type == "VkAllocationCallbacks"); - key = (commandData.second.params.size() == 2) ? "" : commandData.second.params[0].type.type; - valueIndex = commandData.second.params.size() - 2; - break; - case 4: - key = commandData.second.params[0].type.type; - valueIndex = 3; - assert(m_handles.find(commandData.second.params[valueIndex].type.type) != m_handles.end()); - m_handles.find(commandData.second.params[valueIndex].type.type)->second.deletePool = commandData.second.params[1].type.type; - break; - default: - assert(false); - valueIndex = 0; + case 2: + case 3: + assert( commandData.second.params.back().type.type == "VkAllocationCallbacks" ); + key = ( commandData.second.params.size() == 2 ) ? "" : commandData.second.params[0].type.type; + valueIndex = commandData.second.params.size() - 2; + break; + case 4: + key = commandData.second.params[0].type.type; + valueIndex = 3; + assert( m_handles.find( commandData.second.params[valueIndex].type.type ) != m_handles.end() ); + m_handles.find( commandData.second.params[valueIndex].type.type )->second.deletePool = + commandData.second.params[1].type.type; + break; + default: assert( false ); valueIndex = 0; } - auto keyHandleIt = m_handles.find(key); - assert((keyHandleIt != m_handles.end()) && (keyHandleIt->second.childrenHandles.find(commandData.second.params[valueIndex].type.type) == keyHandleIt->second.childrenHandles.end())); - keyHandleIt->second.childrenHandles.insert(commandData.second.params[valueIndex].type.type); + auto keyHandleIt = m_handles.find( key ); + assert( ( keyHandleIt != m_handles.end() ) && + ( keyHandleIt->second.childrenHandles.find( commandData.second.params[valueIndex].type.type ) == + keyHandleIt->second.childrenHandles.end() ) ); + keyHandleIt->second.childrenHandles.insert( commandData.second.params[valueIndex].type.type ); - auto handleIt = m_handles.find(commandData.second.params[valueIndex].type.type); - assert(handleIt != m_handles.end()); + auto handleIt = m_handles.find( commandData.second.params[valueIndex].type.type ); + assert( handleIt != m_handles.end() ); handleIt->second.deleteCommand = name; } } -void VulkanHppGenerator::setVulkanLicenseHeader(int line, std::string const& comment) +void VulkanHppGenerator::setVulkanLicenseHeader( int line, std::string const & comment ) { - check(m_vulkanLicenseHeader.empty(), line, "second encounter of a Copyright comment"); + check( m_vulkanLicenseHeader.empty(), line, "second encounter of a Copyright comment" ); m_vulkanLicenseHeader = comment; // replace any '\n' with "\n// " - for (size_t pos = m_vulkanLicenseHeader.find('\n'); pos != std::string::npos; pos = m_vulkanLicenseHeader.find('\n', pos + 1)) + for ( size_t pos = m_vulkanLicenseHeader.find( '\n' ); pos != std::string::npos; + pos = m_vulkanLicenseHeader.find( '\n', pos + 1 ) ) { - m_vulkanLicenseHeader.replace(pos, 1, "\n// "); + m_vulkanLicenseHeader.replace( pos, 1, "\n// " ); } // and add a little message on our own m_vulkanLicenseHeader += "\n\n// This header is generated from the Khronos Vulkan XML API Registry."; - m_vulkanLicenseHeader = trim(m_vulkanLicenseHeader) + "\n"; + m_vulkanLicenseHeader = trim( m_vulkanLicenseHeader ) + "\n"; } -std::string VulkanHppGenerator::toString(TypeCategory category) +std::string VulkanHppGenerator::toString( TypeCategory category ) { - switch (category) + switch ( category ) { - case TypeCategory::Bitmask: return "bitmask"; - case TypeCategory::BaseType: return "basetype"; - case TypeCategory::Define: return "define"; - case 
TypeCategory::Enum: return "enum";
-    case TypeCategory::FuncPointer: return "funcpointer";
-    case TypeCategory::Handle: return "handle";
-    case TypeCategory::Requires: return "requires";
-    case TypeCategory::Struct: return "struct";
-    case TypeCategory::Union: return "union";
-    case TypeCategory::Unknown: return "unkown";
-    default:
-      assert(false);
-      return "";
+    case TypeCategory::Bitmask: return "bitmask";
+    case TypeCategory::BaseType: return "basetype";
+    case TypeCategory::Define: return "define";
+    case TypeCategory::Enum: return "enum";
+    case TypeCategory::FuncPointer: return "funcpointer";
+    case TypeCategory::Handle: return "handle";
+    case TypeCategory::Requires: return "requires";
+    case TypeCategory::Struct: return "struct";
+    case TypeCategory::Union: return "union";
+    case TypeCategory::Unknown: return "unknown";
+    default: assert( false ); return "";
   }
 }
 
-
 std::string VulkanHppGenerator::TypeData::compose() const
 {
-  return prefix + (prefix.empty() ? "" : " ") + ((type.substr(0, 2) == "Vk") ? "VULKAN_HPP_NAMESPACE::" : "") + stripPrefix(type, "Vk") + postfix;
+  return prefix + ( prefix.empty() ? "" : " " ) + ( ( type.substr( 0, 2 ) == "Vk" ) ? "VULKAN_HPP_NAMESPACE::" : "" ) +
+         stripPrefix( type, "Vk" ) + postfix;
 }
 
-std::string to_string(tinyxml2::XMLError error)
+std::string to_string( tinyxml2::XMLError error )
 {
-  switch (error)
+  switch ( error )
   {
-    case tinyxml2::XML_SUCCESS : return "XML_SUCCESS";
-    case tinyxml2::XML_NO_ATTRIBUTE : return "XML_NO_ATTRIBUTE";
-    case tinyxml2::XML_WRONG_ATTRIBUTE_TYPE : return "XML_WRONG_ATTRIBUTE_TYPE";
-    case tinyxml2::XML_ERROR_FILE_NOT_FOUND : return "XML_ERROR_FILE_NOT_FOUND";
-    case tinyxml2::XML_ERROR_FILE_COULD_NOT_BE_OPENED : return "XML_ERROR_FILE_COULD_NOT_BE_OPENED";
-    case tinyxml2::XML_ERROR_FILE_READ_ERROR : return "XML_ERROR_FILE_READ_ERROR";
-    case tinyxml2::XML_ERROR_PARSING_ELEMENT : return "XML_ERROR_PARSING_ELEMENT";
-    case tinyxml2::XML_ERROR_PARSING_ATTRIBUTE : return "XML_ERROR_PARSING_ATTRIBUTE";
-    case tinyxml2::XML_ERROR_PARSING_TEXT : return "XML_ERROR_PARSING_TEXT";
-    case tinyxml2::XML_ERROR_PARSING_CDATA : return "XML_ERROR_PARSING_CDATA";
-    case tinyxml2::XML_ERROR_PARSING_COMMENT : return "XML_ERROR_PARSING_COMMENT";
-    case tinyxml2::XML_ERROR_PARSING_DECLARATION : return "XML_ERROR_PARSING_DECLARATION";
-    case tinyxml2::XML_ERROR_PARSING_UNKNOWN : return "XML_ERROR_PARSING_UNKNOWN";
-    case tinyxml2::XML_ERROR_EMPTY_DOCUMENT : return "XML_ERROR_EMPTY_DOCUMENT";
-    case tinyxml2::XML_ERROR_MISMATCHED_ELEMENT : return "XML_ERROR_MISMATCHED_ELEMENT";
-    case tinyxml2::XML_ERROR_PARSING : return "XML_ERROR_PARSING";
-    case tinyxml2::XML_CAN_NOT_CONVERT_TEXT : return "XML_CAN_NOT_CONVERT_TEXT";
-    case tinyxml2::XML_NO_TEXT_NODE : return "XML_NO_TEXT_NODE";
-    default : return "unknown error code <" + std::to_string(error) + ">";
+    case tinyxml2::XML_SUCCESS: return "XML_SUCCESS";
+    case tinyxml2::XML_NO_ATTRIBUTE: return "XML_NO_ATTRIBUTE";
+    case tinyxml2::XML_WRONG_ATTRIBUTE_TYPE: return "XML_WRONG_ATTRIBUTE_TYPE";
+    case tinyxml2::XML_ERROR_FILE_NOT_FOUND: return "XML_ERROR_FILE_NOT_FOUND";
+    case tinyxml2::XML_ERROR_FILE_COULD_NOT_BE_OPENED: return "XML_ERROR_FILE_COULD_NOT_BE_OPENED";
+    case tinyxml2::XML_ERROR_FILE_READ_ERROR: return "XML_ERROR_FILE_READ_ERROR";
+    case tinyxml2::XML_ERROR_PARSING_ELEMENT: return "XML_ERROR_PARSING_ELEMENT";
+    case tinyxml2::XML_ERROR_PARSING_ATTRIBUTE: return "XML_ERROR_PARSING_ATTRIBUTE";
+    case tinyxml2::XML_ERROR_PARSING_TEXT: return "XML_ERROR_PARSING_TEXT";
+    case 
tinyxml2::XML_ERROR_PARSING_CDATA: return "XML_ERROR_PARSING_CDATA"; + case tinyxml2::XML_ERROR_PARSING_COMMENT: return "XML_ERROR_PARSING_COMMENT"; + case tinyxml2::XML_ERROR_PARSING_DECLARATION: return "XML_ERROR_PARSING_DECLARATION"; + case tinyxml2::XML_ERROR_PARSING_UNKNOWN: return "XML_ERROR_PARSING_UNKNOWN"; + case tinyxml2::XML_ERROR_EMPTY_DOCUMENT: return "XML_ERROR_EMPTY_DOCUMENT"; + case tinyxml2::XML_ERROR_MISMATCHED_ELEMENT: return "XML_ERROR_MISMATCHED_ELEMENT"; + case tinyxml2::XML_ERROR_PARSING: return "XML_ERROR_PARSING"; + case tinyxml2::XML_CAN_NOT_CONVERT_TEXT: return "XML_CAN_NOT_CONVERT_TEXT"; + case tinyxml2::XML_NO_TEXT_NODE: return "XML_NO_TEXT_NODE"; + default: return "unknown error code <" + std::to_string( error ) + ">"; } } -int main(int argc, char **argv) +int main( int argc, char ** argv ) { static const std::string classArrayProxy = R"( #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) @@ -5625,7 +6682,7 @@ int main(int argc, char **argv) #endif )"; - static const std::string classArrayWrapper = R"( + static const std::string classArrayWrapper = R"( template class ArrayWrapper1D : public std::array { @@ -6772,85 +7829,78 @@ namespace std }; )"; - try { + try + { tinyxml2::XMLDocument doc; - std::string filename = (argc == 1) ? VK_SPEC : argv[1]; + std::string filename = ( argc == 1 ) ? VK_SPEC : argv[1]; std::cout << "Loading vk.xml from " << filename << std::endl; std::cout << "Writing vulkan.hpp to " << VULKAN_HPP_FILE << std::endl; - tinyxml2::XMLError error = doc.LoadFile(filename.c_str()); - if (error != tinyxml2::XML_SUCCESS) + tinyxml2::XMLError error = doc.LoadFile( filename.c_str() ); + if ( error != tinyxml2::XML_SUCCESS ) { - std::cout << "VkGenerate: failed to load file " << filename << " with error <" << to_string(error) << ">" << std::endl; + std::cout << "VulkanHppGenerator: failed to load file " << filename << " with error <" << to_string( error ) + << ">" << std::endl; return -1; } - VulkanHppGenerator generator(doc); + VulkanHppGenerator generator( doc ); - std::string str; + std::string str; static const size_t estimatedLength = 4 * 1024 * 1024; - str.reserve(estimatedLength); - str += generator.getVulkanLicenseHeader() - + includes - + "\n"; - appendVersionCheck(str, generator.getVersion()); - appendTypesafeStuff(str, generator.getTypesafeCheck()); - str += defines - + "\n" - + "namespace VULKAN_HPP_NAMESPACE\n" - + "{\n" - + classArrayProxy - + classArrayWrapper - + classFlags - + classOptional - + classStructureChain - + classUniqueHandle; - generator.appendDispatchLoaderStatic(str); - generator.appendDispatchLoaderDefault(str); - str += classObjectDestroy - + classObjectFree - + classPoolFree - + "\n"; - generator.appendBaseTypes(str); - generator.appendEnums(str); + str.reserve( estimatedLength ); + str += generator.getVulkanLicenseHeader() + includes + "\n"; + appendVersionCheck( str, generator.getVersion() ); + appendTypesafeStuff( str, generator.getTypesafeCheck() ); + str += defines + "\n" + "namespace VULKAN_HPP_NAMESPACE\n" + "{\n" + classArrayProxy + classArrayWrapper + + classFlags + classOptional + classStructureChain + classUniqueHandle; + generator.appendDispatchLoaderStatic( str ); + generator.appendDispatchLoaderDefault( str ); + str += classObjectDestroy + classObjectFree + classPoolFree + "\n"; + generator.appendBaseTypes( str ); + generator.appendEnums( str ); str += typeTraits; - generator.appendBitmasks(str); - str += "} // namespace VULKAN_HPP_NAMESPACE\n" - + is_error_code_enum - + "\n" - + "namespace 
VULKAN_HPP_NAMESPACE\n"
-      + "{\n"
-      + "#ifndef VULKAN_HPP_NO_EXCEPTIONS"
-      + exceptions;
-    generator.appendResultExceptions(str);
-    generator.appendThrowExceptions(str);
-    str += "#endif\n"
-      + structResultValue;
-    generator.appendForwardDeclarations(str);
-    generator.appendHandles(str);
-    generator.appendStructs(str);
-    generator.appendHandlesCommandDefintions(str);
-    generator.appendStructureChainValidation(str);
-    generator.appendDispatchLoaderDynamic(str);
+    generator.appendBitmasks( str );
+    str += "} // namespace VULKAN_HPP_NAMESPACE\n" + is_error_code_enum + "\n" + "namespace VULKAN_HPP_NAMESPACE\n" +
+           "{\n" + "#ifndef VULKAN_HPP_NO_EXCEPTIONS" + exceptions;
+    generator.appendResultExceptions( str );
+    generator.appendThrowExceptions( str );
+    str += "#endif\n" + structResultValue;
+    generator.appendForwardDeclarations( str );
+    generator.appendHandles( str );
+    generator.appendStructs( str );
+    generator.appendHandlesCommandDefintions( str );
+    generator.appendStructureChainValidation( str );
+    generator.appendDispatchLoaderDynamic( str );
     str += "} // namespace VULKAN_HPP_NAMESPACE\n";
-    generator.appendHashStructures(str);
+    generator.appendHashStructures( str );
     str += "#endif\n";
-    assert(str.length() < estimatedLength);
-    cleanup(str);
-
-    std::ofstream ofs(VULKAN_HPP_FILE);
-    assert(!ofs.fail());
+    std::ofstream ofs( VULKAN_HPP_FILE );
+    assert( !ofs.fail() );
     ofs << str;
     ofs.close();
+
+#if defined( CLANG_FORMAT_EXECUTABLE )
+    std::cout << "VulkanHppGenerator: formatting vulkan.hpp using clang-format...";
+    int ret = std::system( "\"" CLANG_FORMAT_EXECUTABLE "\" -i --style=file " VULKAN_HPP_FILE );
+    if ( ret != 0 )
+    {
+      std::cout << "VulkanHppGenerator: failed to format file " << VULKAN_HPP_FILE << " with error <" << ret << ">\n";
+      return -1;
+    }
+#else
+    std::cout
+      << "VulkanHppGenerator: could not find clang-format. The generated vulkan.hpp will not be formatted.\n";
+#endif
   }
-  catch (std::exception const& e)
+  catch ( std::exception const & e )
   {
    std::cout << "caught exception: " << e.what() << std::endl;
    return -1;
   }
-  catch (...)
+  catch ( ... 
) { std::cout << "caught unknown exception" << std::endl; return -1; diff --git a/VulkanHppGenerator.hpp b/VulkanHppGenerator.hpp index 54e6d93..bcae7d2 100644 --- a/VulkanHppGenerator.hpp +++ b/VulkanHppGenerator.hpp @@ -14,342 +14,566 @@ #pragma once -#include #include +#include #include -#include #include +#include class VulkanHppGenerator { - public: - VulkanHppGenerator(tinyxml2::XMLDocument const& document); +public: + VulkanHppGenerator( tinyxml2::XMLDocument const & document ); - void appendBaseTypes(std::string & str) const; - void appendBitmasks(std::string & str) const; - void appendDispatchLoaderDynamic(std::string & str); // use vkGet*ProcAddress to get function pointers - void appendDispatchLoaderStatic(std::string & str); // use exported symbols from loader - void appendDispatchLoaderDefault(std::string & str); // typedef to DispatchLoaderStatic or undefined type, based on VK_NO_PROTOTYPES - void appendEnums(std::string & str) const; - void appendForwardDeclarations(std::string & str) const; - void appendHandles(std::string & str) const; - void appendHandlesCommandDefintions(std::string & str) const; - void appendHashStructures(std::string& str) const; - void appendResultExceptions(std::string & str) const; - void appendStructs(std::string & str) const; - void appendStructureChainValidation(std::string & str); - void appendThrowExceptions(std::string & str) const; - std::string const& getTypesafeCheck() const; - std::string const& getVersion() const; - std::string const& getVulkanLicenseHeader() const; + void appendBaseTypes( std::string & str ) const; + void appendBitmasks( std::string & str ) const; + void appendDispatchLoaderDynamic( std::string & str ); // use vkGet*ProcAddress to get function pointers + void appendDispatchLoaderStatic( std::string & str ); // use exported symbols from loader + void appendDispatchLoaderDefault( + std::string & str ); // typedef to DispatchLoaderStatic or undefined type, based on VK_NO_PROTOTYPES + void appendEnums( std::string & str ) const; + void appendForwardDeclarations( std::string & str ) const; + void appendHandles( std::string & str ) const; + void appendHandlesCommandDefintions( std::string & str ) const; + void appendHashStructures( std::string & str ) const; + void appendResultExceptions( std::string & str ) const; + void appendStructs( std::string & str ) const; + void appendStructureChainValidation( std::string & str ); + void appendThrowExceptions( std::string & str ) const; + std::string const & getTypesafeCheck() const; + std::string const & getVersion() const; + std::string const & getVulkanLicenseHeader() const; - private: - struct BaseTypeData +private: + struct BaseTypeData + { + BaseTypeData( std::string const & type_, int line ) : type( type_ ), xmlLine( line ) {} + + std::string type; + int xmlLine; + }; + + struct BitmaskData + { + BitmaskData( std::string const & r, std::string const & t, int line ) + : requirements( r ), type( t ), xmlLine( line ) + {} + + std::string requirements; + std::string type; + std::string platform; + std::string alias; + int xmlLine; + }; + + struct NameData + { + std::string name; + std::vector arraySizes; + std::string bitCount; + }; + + struct TypeData + { + std::string compose() const; + + bool operator==( TypeData const & rhs ) const { - BaseTypeData(std::string const& type_, int line) - : type(type_) - , xmlLine(line) - {} + return ( prefix == rhs.prefix ) && ( type == rhs.type ) && ( postfix == rhs.postfix ); + } - std::string type; - int xmlLine; - }; + std::string prefix; + 
std::string type; + std::string postfix; + }; - struct BitmaskData - { - BitmaskData(std::string const& r, std::string const& t, int line) - : requirements(r) - , type(t) - , xmlLine(line) - {} + struct ParamData + { + ParamData( int line ) : optional( false ), xmlLine( line ) {} - std::string requirements; - std::string type; - std::string platform; - std::string alias; - int xmlLine; - }; + TypeData type; + std::string name; + std::vector arraySizes; + std::string len; + bool optional; + int xmlLine; + }; - struct NameData - { - std::string name; - std::vector arraySizes; - std::string bitCount; - }; + struct CommandData + { + CommandData( int line ) : xmlLine( line ) {} - struct TypeData - { - std::string compose() const; + std::vector params; + std::string platform; + std::string returnType; + std::vector successCodes; + std::vector errorCodes; + std::set aliases; + int xmlLine; + }; - bool operator==(TypeData const& rhs) const - { - return (prefix == rhs.prefix) && (type == rhs.type) && (postfix == rhs.postfix); - } + struct EnumValueData + { + EnumValueData( std::string const & vulkan, std::string const & vk, bool singleBit_ ) + : vulkanValue( vulkan ), vkValue( vk ), singleBit( singleBit_ ) + {} - std::string prefix; - std::string type; - std::string postfix; - }; + std::string vulkanValue; + std::string vkValue; + bool singleBit; + }; - struct ParamData - { - ParamData(int line) - : optional(false) - , xmlLine(line) - {} + struct EnumData + { + void addEnumValue( int line, + std::string const & valueName, + bool bitmask, + bool bitpos, + std::string const & prefix, + std::string const & postfix, + std::string const & tag ); - TypeData type; - std::string name; - std::vector arraySizes; - std::string len; - bool optional; - int xmlLine; - }; + std::string alias; // alias for this enum + std::vector> + aliases; // pairs of vulkan enum value and corresponding vk::-namespace enum value + bool isBitmask = false; + std::string platform; + std::vector values; + }; - struct CommandData - { - CommandData(int line) - : xmlLine(line) - {} + struct ExtensionData + { + ExtensionData( int line ) : xmlLine( line ) {} - std::vector params; - std::string platform; - std::string returnType; - std::vector successCodes; - std::vector errorCodes; - std::set aliases; - int xmlLine; - }; + std::string deprecatedBy; + std::string obsoletedBy; + std::string promotedTo; + std::map requirements; + int xmlLine; + }; - struct EnumValueData - { - EnumValueData(std::string const& vulkan, std::string const& vk, bool singleBit_) - : vulkanValue(vulkan) - , vkValue(vk) - , singleBit(singleBit_) - {} + struct FuncPointerData + { + FuncPointerData( std::string const & r, int line ) : requirements( r ), xmlLine( line ) {} - std::string vulkanValue; - std::string vkValue; - bool singleBit; - }; + std::string requirements; + int xmlLine; + }; - struct EnumData - { - void addEnumValue(int line, std::string const& valueName, bool bitmask, bool bitpos, std::string const& prefix, std::string const& postfix, std::string const& tag); + struct HandleData + { + HandleData( std::vector const & p, int line ) : parents( p ), xmlLine( line ) {} - std::string alias; // alias for this enum - std::vector> aliases; // pairs of vulkan enum value and corresponding vk::-namespace enum value - bool isBitmask = false; - std::string platform; - std::vector values; - }; + std::string alias; + std::set childrenHandles; + std::map commands; + std::string deleteCommand; + std::string deletePool; + std::string platform; + std::vector parents; + 
int xmlLine; + }; - struct ExtensionData - { - ExtensionData(int line) - : xmlLine(line) - {} + struct MemberData + { + MemberData( int line ) : xmlLine( line ) {} - std::string deprecatedBy; - std::string obsoletedBy; - std::string promotedTo; - std::map requirements; - int xmlLine; - }; + TypeData type; + std::string name; + std::vector arraySizes; + std::string bitCount; + std::string values; + std::string usedConstant; + int xmlLine; + }; - struct FuncPointerData - { - FuncPointerData(std::string const& r, int line) - : requirements(r) - , xmlLine(line) - {} + struct StructureData + { + StructureData( std::vector const & extends, int line ) + : returnedOnly( false ), isUnion( false ), structExtends( extends ), xmlLine( line ) + {} - std::string requirements; - int xmlLine; - }; + bool returnedOnly; + bool isUnion; + std::vector members; + std::string platform; + std::vector structExtends; + std::set aliases; + std::string subStruct; + int xmlLine; + }; - struct HandleData - { - HandleData(std::vector const& p, int line) - : parents(p) - , xmlLine(line) - {} + enum class TypeCategory + { + Bitmask, + BaseType, + Define, + Enum, + FuncPointer, + Handle, + Requires, + Struct, + Union, + Unknown + }; - std::string alias; - std::set childrenHandles; - std::map commands; - std::string deleteCommand; - std::string deletePool; - std::string platform; - std::vector parents; - int xmlLine; - }; - - struct MemberData - { - MemberData(int line) - : xmlLine(line) - {} - - TypeData type; - std::string name; - std::vector arraySizes; - std::string bitCount; - std::string values; - std::string usedConstant; - int xmlLine; - }; - - struct StructureData - { - StructureData(std::vector const& extends, int line) - : returnedOnly(false) - , isUnion(false) - , structExtends(extends) - , xmlLine(line) - {} - - bool returnedOnly; - bool isUnion; - std::vector members; - std::string platform; - std::vector structExtends; - std::set aliases; - std::string subStruct; - int xmlLine; - }; - - enum class TypeCategory - { - Bitmask, - BaseType, - Define, - Enum, - FuncPointer, - Handle, - Requires, - Struct, - Union, - Unknown - }; - - private: - void appendArgumentPlainType(std::string & str, ParamData const& paramData) const; - void appendArguments(std::string & str, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool twoStep, bool firstCall, bool singular, size_t from, size_t to) const; - void appendArgumentVector(std::string & str, size_t paramIndex, ParamData const& paramData, size_t returnParamIndex, size_t templateParamIndex, bool twoStep, bool firstCall, bool singular) const; - void appendArgumentVulkanType(std::string & str, ParamData const& paramData) const; - void appendBitmask(std::string & os, std::string const& bitmaskName, std::string const& bitmaskType, std::string const& bitmaskAlias, std::string const& enumName, std::vector const& enumValues) const; - void appendBitmaskToStringFunction(std::string & str, std::string const& flagsName, std::string const& enumName, std::vector const& enumValues) const; - void appendCall(std::string &str, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool twoStep, bool firstCall, bool singular) const; - void appendCommand(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, bool definition) const; - void appendEnum(std::string & str, std::pair 
const& enumData) const; - void appendEnumInitializer(std::string& str, TypeData const& type, std::vector const& arraySizes, std::vector const& values, bool argument) const; - void appendEnumToString(std::string & str, std::pair const& enumData) const; - void appendFunction(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool twoStep, std::string const& enhancedReturnType, bool definition, bool enhanced, bool singular, bool unique, bool isStructureChain, bool withAllocator) const; - void appendFunctionBodyEnhanced(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool twoStep, std::string const& enhancedReturnType, bool singular, bool unique, bool isStructureChain, bool withAllocator) const; - std::string appendFunctionBodyEnhancedLocalReturnVariable(std::string & str, std::string const& indentation, CommandData const& commandData, size_t returnParamIndex, std::map const& vectorParamIndices, bool twoStep, std::string const& enhancedReturnType, bool singular, bool isStructureChain, bool withAllocator) const; - void appendFunctionBodyEnhancedLocalReturnVariableVectorSize(std::string & str, std::vector const& params, std::pair const& vectorParamIndex, size_t returnParamIndex, std::map const& vectorParamIndices, bool withAllocator) const; - void appendFunctionBodyEnhancedMultiVectorSizeCheck(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, std::map const& vectorParamIndices) const; - void appendFunctionBodyEnhancedReturnResultValue(std::string & str, std::string const& indentation, std::string const& returnName, std::string const& name, CommandData const& commandData, size_t returnParamIndex, bool twoStep, bool singular, bool unique) const; - void appendFunctionBodyEnhancedSingleStep(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool singular) const; - void appendFunctionBodyEnhancedTwoStep(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool singular, std::string const& returnName) const; - void appendFunctionBodyEnhancedVectorOfStructureChain(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, std::map const& vectorParamIndices, bool withAllocator) const; - void appendFunctionBodyEnhancedVectorOfUniqueHandles(std::string & str, std::string const& indentation, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool twoStep, bool singular, bool withAllocator) const; - void appendFunctionBodyStandard(std::string & str, std::string const& indentation, std::string const& commandName, CommandData const& commandData) const; - void appendFunctionBodyStandardArgument(std::string & str, TypeData const& typeData, std::string const& name, std::vector const& arraySizes) const; - bool appendFunctionHeaderArgumentEnhanced(std::string & str, ParamData const& param, size_t 
paramIndex, std::map const& vectorParamIndices, bool skip, bool argEncountered, bool isTemplateParam, bool isLastArgument, bool singular, bool withDefaults, bool withAllocator) const; - void appendFunctionHeaderArgumentEnhancedPointer(std::string & str, ParamData const& param, std::string const& strippedParameterName, bool withDefaults, bool withAllocator) const; - void appendFunctionHeaderArgumentEnhancedSimple(std::string & str, ParamData const& param, bool lastArgument, bool withDefaults, bool withAllocator) const; - void appendFunctionHeaderArgumentEnhancedVector(std::string & str, ParamData const& param, std::string const& strippedParameterName, bool hasSizeParam, bool isTemplateParam, bool singular, bool withDefaults, bool withAllocator) const; - void appendFunctionHeaderArguments(std::string & str, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool enhanced, bool singular, bool withDefaults, bool withAllocator) const; - void appendFunctionHeaderArgumentsEnhanced(std::string & str, std::string const& name, CommandData const& commandData, size_t returnParamIndex, size_t templateParamIndex, std::map const& vectorParamIndices, bool singular, bool withDefaults, bool withAllocator) const; - void appendFunctionHeaderArgumentsStandard(std::string & str, std::string const& name, CommandData const& commandData, bool withDefaults) const; - bool appendFunctionHeaderArgumentStandard(std::string & str, ParamData const& param, bool argEncountered, bool isLastArgument, bool withDefaults) const; - void appendFunctionHeaderReturnType(std::string & str, CommandData const& commandData, size_t returnParamIndex, std::map const& vectorParamIndices, std::string const& enhancedReturnType, bool enhanced, bool twoStep, bool singular, bool unique, bool isStructureChain) const; - void appendFunctionHeaderTemplate(std::string & str, std::string const& indentation, size_t returnParamIndex, size_t templateParamIndex, std::string const& enhancedReturnType, bool enhanced, bool singular, bool unique, bool withDefault, bool isStructureChain) const; - void appendHandle(std::string & str, std::pair const& handle, std::set & listedHandles) const; - void appendPlatformEnter(std::string & str, bool isAliased, std::string const& platform) const; - void appendPlatformLeave(std::string & str, bool isAliased, std::string const& platform) const; - void appendStruct(std::string & str, std::pair const& structure, std::set & listedStructures) const; - void appendStructAssignmentOperator(std::string &str, std::pair const& structure, std::string const& prefix) const; - void appendStructCompareOperators(std::string & str, std::pair const& structure) const; - void appendStructConstructor(std::string &str, std::pair const& structData, std::string const& prefix) const; - bool appendStructConstructorArgument(std::string & str, bool listedArgument, std::string const& indentation, MemberData const& memberData) const; - void appendStructCopyConstructors(std::string & str, std::string const& vkName) const; - void appendStructMembers(std::string & str, std::pair const& structData, std::string const& prefix) const; - void appendStructSetter(std::string & str, std::string const& structureName, bool isUnion, MemberData const& memberData) const; - void appendStructSubConstructor(std::string &str, std::pair const& structData, std::string const& prefix) const; - void appendStructure(std::string & str, std::pair const& structure) const; - void 
appendUnion(std::string & str, std::pair const& structure) const; - void appendUniqueTypes(std::string &str, std::string const& parentType, std::set const& childrenTypes) const; - std::string constructConstexprString(std::pair const& structData) const; - void checkCorrectness(); - bool checkLenAttribute(std::string const& len, std::vector const& params); - bool containsArray(std::string const& type) const; - bool containsUnion(std::string const& type) const; - std::string determineEnhancedReturnType(CommandData const& commandData, size_t returnParamIndex, std::map const& vectorParamIndices, bool isStructureChain) const; - size_t determineReturnParamIndex(CommandData const& commandData, std::map const& vectorParamIndices, bool twoStep) const; - std::string determineSubStruct(std::pair const& structure) const; - size_t determineTemplateParamIndex(std::vector const& params, std::map const& vectorParamIndices) const; - std::map determineVectorParamIndices(std::vector const& params) const; - bool holdsSType(std::string const& type) const; - bool isTwoStepAlgorithm(std::vector const& params) const; - void linkCommandToHandle(int line, std::string const& name, CommandData const& commandData); - void readBaseType(tinyxml2::XMLElement const* element, std::map const& attributes); - void readBitmask(tinyxml2::XMLElement const* element, std::map const& attributes); - void readBitmaskAlias(tinyxml2::XMLElement const* element, std::map const& attributes); - void readCommand(tinyxml2::XMLElement const* element); - void readCommand(tinyxml2::XMLElement const* element, std::map const& attributess); - void readCommandAlias(tinyxml2::XMLElement const* element, std::map const& attributes); - ParamData readCommandParam(tinyxml2::XMLElement const* element, std::vector const& params); - std::pair readCommandProto(tinyxml2::XMLElement const* element); - void readCommands(tinyxml2::XMLElement const* element); - std::string readComment(tinyxml2::XMLElement const* element); - void readDefine(tinyxml2::XMLElement const* element, std::map const& attributes); - void readEnum(tinyxml2::XMLElement const* element, EnumData & enumData, bool bitmask, std::string const& prefix, std::string const& postfix); - void readEnum(tinyxml2::XMLElement const* element, std::map const& attributes, EnumData & enumData, bool bitmask, std::string const& prefix, std::string const& postfix); - void readEnumAlias(tinyxml2::XMLElement const* element, std::map const& attributes, EnumData & enumData, bool bitmask, std::string const& prefix, std::string const& postfix); - void readEnumConstant(tinyxml2::XMLElement const* element); - void readEnums(tinyxml2::XMLElement const* element); - void readExtension(tinyxml2::XMLElement const* element); - void readExtensionDisabledCommand(tinyxml2::XMLElement const* element); - void readExtensionDisabledEnum(std::string const& extensionName, tinyxml2::XMLElement const* element); - void readExtensionDisabledRequire(std::string const& extensionName, tinyxml2::XMLElement const* element); - void readExtensionDisabledType(tinyxml2::XMLElement const* element); - void readExtensionRequire(tinyxml2::XMLElement const* element, std::string const& platform, std::string const& tag, std::map & requirements); - void readExtensionRequireCommand(tinyxml2::XMLElement const* element, std::string const& platform); - void readExtensionRequireType(tinyxml2::XMLElement const* element, std::string const& platform); - void readExtensions(tinyxml2::XMLElement const* element); - void readFeature(tinyxml2::XMLElement const* element); 
- void readFeatureRequire(tinyxml2::XMLElement const* element); - void readFuncpointer(tinyxml2::XMLElement const* element, std::map const& attributes); - void readHandle(tinyxml2::XMLElement const* element, std::map const& attributes); - std::pair readNameAndType(tinyxml2::XMLElement const* elements); - void readPlatform(tinyxml2::XMLElement const* element); - void readPlatforms(tinyxml2::XMLElement const* element); - void readRegistry(tinyxml2::XMLElement const* element); - void readRequireCommand(tinyxml2::XMLElement const* element); - void readRequireEnum(tinyxml2::XMLElement const* element, std::string const& tag); - void readRequireEnum(tinyxml2::XMLElement const* element, std::map const& attributes, std::string const& tag); - void readRequireEnumAlias(tinyxml2::XMLElement const* element, std::map const& attributes, std::string const& tag); - void readRequires(tinyxml2::XMLElement const* element, std::map const& attributes); - void readRequireType(tinyxml2::XMLElement const* element); - void readStruct(tinyxml2::XMLElement const* element, bool isUnion, std::map const& attributes); - void readStructAlias(tinyxml2::XMLElement const* element, std::map const& attributes); - void readStructMember(tinyxml2::XMLElement const* element, std::vector & members); - void readStructMemberEnum(tinyxml2::XMLElement const* element, MemberData & memberData); - void readStructMemberName(tinyxml2::XMLElement const* element, MemberData & memberData, std::vector const& members); - void readStructMemberType(tinyxml2::XMLElement const* element, MemberData & memberData); - void readTag(tinyxml2::XMLElement const* element); - void readTags(tinyxml2::XMLElement const* element); - void readType(tinyxml2::XMLElement const* element); - void readTypeEnum(tinyxml2::XMLElement const* element, std::map const& attributes); - void readTypeInclude(tinyxml2::XMLElement const* element, std::map const& attributes); - void readTypes(tinyxml2::XMLElement const* element); - void registerDeleter(std::string const& name, std::pair const& commandData); - void setVulkanLicenseHeader(int line, std::string const& comment); - std::string toString(TypeCategory category); +private: + void appendArgumentPlainType( std::string & str, ParamData const & paramData ) const; + void appendArguments( std::string & str, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool twoStep, + bool firstCall, + bool singular, + size_t from, + size_t to ) const; + void appendArgumentVector( std::string & str, + size_t paramIndex, + ParamData const & paramData, + size_t returnParamIndex, + size_t templateParamIndex, + bool twoStep, + bool firstCall, + bool singular ) const; + void appendArgumentVulkanType( std::string & str, ParamData const & paramData ) const; + void appendBitmask( std::string & os, + std::string const & bitmaskName, + std::string const & bitmaskType, + std::string const & bitmaskAlias, + std::string const & enumName, + std::vector const & enumValues ) const; + void appendBitmaskToStringFunction( std::string & str, + std::string const & flagsName, + std::string const & enumName, + std::vector const & enumValues ) const; + void appendCall( std::string & str, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool twoStep, + bool firstCall, + bool singular ) const; + void appendCommand( std::string & str, + std::string const & indentation, + 
std::string const & name, + CommandData const & commandData, + bool definition ) const; + void appendEnum( std::string & str, std::pair const & enumData ) const; + void appendEnumInitializer( std::string & str, + TypeData const & type, + std::vector const & arraySizes, + std::vector const & values, + bool argument ) const; + void appendEnumToString( std::string & str, std::pair const & enumData ) const; + void appendFunction( std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool twoStep, + std::string const & enhancedReturnType, + bool definition, + bool enhanced, + bool singular, + bool unique, + bool isStructureChain, + bool withAllocator ) const; + void appendFunctionBodyEnhanced( std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool twoStep, + std::string const & enhancedReturnType, + bool singular, + bool unique, + bool isStructureChain, + bool withAllocator ) const; + std::string appendFunctionBodyEnhancedLocalReturnVariable( std::string & str, + std::string const & indentation, + CommandData const & commandData, + size_t returnParamIndex, + std::map const & vectorParamIndices, + bool twoStep, + std::string const & enhancedReturnType, + bool singular, + bool isStructureChain, + bool withAllocator ) const; + void appendFunctionBodyEnhancedLocalReturnVariableVectorSize( std::string & str, + std::vector const & params, + std::pair const & vectorParamIndex, + size_t returnParamIndex, + std::map const & vectorParamIndices, + bool withAllocator ) const; + void appendFunctionBodyEnhancedMultiVectorSizeCheck( std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + std::map const & vectorParamIndices ) const; + void appendFunctionBodyEnhancedReturnResultValue( std::string & str, + std::string const & indentation, + std::string const & returnName, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + bool twoStep, + bool singular, + bool unique ) const; + void appendFunctionBodyEnhancedSingleStep( std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool singular ) const; + void appendFunctionBodyEnhancedTwoStep( std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool singular, + std::string const & returnName ) const; + void appendFunctionBodyEnhancedVectorOfStructureChain( std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + std::map const & vectorParamIndices, + bool withAllocator ) const; + void appendFunctionBodyEnhancedVectorOfUniqueHandles( std::string & str, + std::string const & indentation, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool twoStep, + bool singular, + bool withAllocator ) const; + void 
appendFunctionBodyStandard( std::string & str, + std::string const & indentation, + std::string const & commandName, + CommandData const & commandData ) const; + void appendFunctionBodyStandardArgument( std::string & str, + TypeData const & typeData, + std::string const & name, + std::vector const & arraySizes ) const; + bool appendFunctionHeaderArgumentEnhanced( std::string & str, + ParamData const & param, + size_t paramIndex, + std::map const & vectorParamIndices, + bool skip, + bool argEncountered, + bool isTemplateParam, + bool isLastArgument, + bool singular, + bool withDefaults, + bool withAllocator ) const; + void appendFunctionHeaderArgumentEnhancedPointer( std::string & str, + ParamData const & param, + std::string const & strippedParameterName, + bool withDefaults, + bool withAllocator ) const; + void appendFunctionHeaderArgumentEnhancedSimple( + std::string & str, ParamData const & param, bool lastArgument, bool withDefaults, bool withAllocator ) const; + void appendFunctionHeaderArgumentEnhancedVector( std::string & str, + ParamData const & param, + std::string const & strippedParameterName, + bool hasSizeParam, + bool isTemplateParam, + bool singular, + bool withDefaults, + bool withAllocator ) const; + void appendFunctionHeaderArguments( std::string & str, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool enhanced, + bool singular, + bool withDefaults, + bool withAllocator ) const; + void appendFunctionHeaderArgumentsEnhanced( std::string & str, + std::string const & name, + CommandData const & commandData, + size_t returnParamIndex, + size_t templateParamIndex, + std::map const & vectorParamIndices, + bool singular, + bool withDefaults, + bool withAllocator ) const; + void appendFunctionHeaderArgumentsStandard( std::string & str, + std::string const & name, + CommandData const & commandData, + bool withDefaults ) const; + bool appendFunctionHeaderArgumentStandard( + std::string & str, ParamData const & param, bool argEncountered, bool isLastArgument, bool withDefaults ) const; + void appendFunctionHeaderReturnType( std::string & str, + CommandData const & commandData, + size_t returnParamIndex, + std::map const & vectorParamIndices, + std::string const & enhancedReturnType, + bool enhanced, + bool twoStep, + bool singular, + bool unique, + bool isStructureChain ) const; + void appendFunctionHeaderTemplate( std::string & str, + std::string const & indentation, + size_t returnParamIndex, + size_t templateParamIndex, + std::string const & enhancedReturnType, + bool enhanced, + bool singular, + bool unique, + bool withDefault, + bool isStructureChain ) const; + void appendHandle( std::string & str, + std::pair const & handle, + std::set & listedHandles ) const; + void appendPlatformEnter( std::string & str, bool isAliased, std::string const & platform ) const; + void appendPlatformLeave( std::string & str, bool isAliased, std::string const & platform ) const; + void appendStruct( std::string & str, + std::pair const & structure, + std::set & listedStructures ) const; + void appendStructAssignmentOperator( std::string & str, + std::pair const & structure, + std::string const & prefix ) const; + void appendStructCompareOperators( std::string & str, std::pair const & structure ) const; + void appendStructConstructor( std::string & str, + std::pair const & structData, + std::string const & prefix ) const; + bool appendStructConstructorArgument( std::string & str, + 
bool listedArgument, + std::string const & indentation, + MemberData const & memberData ) const; + void appendStructCopyConstructors( std::string & str, std::string const & vkName ) const; + void appendStructMembers( std::string & str, + std::pair const & structData, + std::string const & prefix ) const; + void appendStructSetter( std::string & str, + std::string const & structureName, + bool isUnion, + MemberData const & memberData ) const; + void appendStructSubConstructor( std::string & str, + std::pair const & structData, + std::string const & prefix ) const; + void appendStructure( std::string & str, std::pair const & structure ) const; + void appendUnion( std::string & str, std::pair const & structure ) const; + void appendUniqueTypes( std::string & str, + std::string const & parentType, + std::set const & childrenTypes ) const; + std::string constructConstexprString( std::pair const & structData ) const; + void checkCorrectness(); + bool checkLenAttribute( std::string const & len, std::vector const & params ); + bool containsArray( std::string const & type ) const; + bool containsUnion( std::string const & type ) const; + std::string determineEnhancedReturnType( CommandData const & commandData, + size_t returnParamIndex, + std::map const & vectorParamIndices, + bool isStructureChain ) const; + size_t determineReturnParamIndex( CommandData const & commandData, + std::map const & vectorParamIndices, + bool twoStep ) const; + std::string determineSubStruct( std::pair const & structure ) const; + size_t determineTemplateParamIndex( std::vector const & params, + std::map const & vectorParamIndices ) const; + std::map determineVectorParamIndices( std::vector const & params ) const; + bool holdsSType( std::string const & type ) const; + bool isTwoStepAlgorithm( std::vector const & params ) const; + void linkCommandToHandle( int line, std::string const & name, CommandData const & commandData ); + void readBaseType( tinyxml2::XMLElement const * element, std::map const & attributes ); + void readBitmask( tinyxml2::XMLElement const * element, std::map const & attributes ); + void readBitmaskAlias( tinyxml2::XMLElement const * element, std::map const & attributes ); + void readCommand( tinyxml2::XMLElement const * element ); + void readCommand( tinyxml2::XMLElement const * element, std::map const & attributess ); + void readCommandAlias( tinyxml2::XMLElement const * element, std::map const & attributes ); + ParamData readCommandParam( tinyxml2::XMLElement const * element, std::vector const & params ); + std::pair readCommandProto( tinyxml2::XMLElement const * element ); + void readCommands( tinyxml2::XMLElement const * element ); + std::string readComment( tinyxml2::XMLElement const * element ); + void readDefine( tinyxml2::XMLElement const * element, std::map const & attributes ); + void readEnum( tinyxml2::XMLElement const * element, + EnumData & enumData, + bool bitmask, + std::string const & prefix, + std::string const & postfix ); + void readEnum( tinyxml2::XMLElement const * element, + std::map const & attributes, + EnumData & enumData, + bool bitmask, + std::string const & prefix, + std::string const & postfix ); + void readEnumAlias( tinyxml2::XMLElement const * element, + std::map const & attributes, + EnumData & enumData, + bool bitmask, + std::string const & prefix, + std::string const & postfix ); + void readEnumConstant( tinyxml2::XMLElement const * element ); + void readEnums( tinyxml2::XMLElement const * element ); + void readExtension( tinyxml2::XMLElement const * element ); + 
void readExtensionDisabledCommand( tinyxml2::XMLElement const * element ); + void readExtensionDisabledEnum( std::string const & extensionName, tinyxml2::XMLElement const * element ); + void readExtensionDisabledRequire( std::string const & extensionName, tinyxml2::XMLElement const * element ); + void readExtensionDisabledType( tinyxml2::XMLElement const * element ); + void readExtensionRequire( tinyxml2::XMLElement const * element, + std::string const & platform, + std::string const & tag, + std::map & requirements ); + void readExtensionRequireCommand( tinyxml2::XMLElement const * element, std::string const & platform ); + void readExtensionRequireType( tinyxml2::XMLElement const * element, std::string const & platform ); + void readExtensions( tinyxml2::XMLElement const * element ); + void readFeature( tinyxml2::XMLElement const * element ); + void readFeatureRequire( tinyxml2::XMLElement const * element ); + void readFuncpointer( tinyxml2::XMLElement const * element, std::map const & attributes ); + void readHandle( tinyxml2::XMLElement const * element, std::map const & attributes ); + std::pair readNameAndType( tinyxml2::XMLElement const * elements ); + void readPlatform( tinyxml2::XMLElement const * element ); + void readPlatforms( tinyxml2::XMLElement const * element ); + void readRegistry( tinyxml2::XMLElement const * element ); + void readRequireCommand( tinyxml2::XMLElement const * element ); + void readRequireEnum( tinyxml2::XMLElement const * element, std::string const & tag ); + void readRequireEnum( tinyxml2::XMLElement const * element, + std::map const & attributes, + std::string const & tag ); + void readRequireEnumAlias( tinyxml2::XMLElement const * element, + std::map const & attributes, + std::string const & tag ); + void readRequires( tinyxml2::XMLElement const * element, std::map const & attributes ); + void readRequireType( tinyxml2::XMLElement const * element ); + void readStruct( tinyxml2::XMLElement const * element, + bool isUnion, + std::map const & attributes ); + void readStructAlias( tinyxml2::XMLElement const * element, std::map const & attributes ); + void readStructMember( tinyxml2::XMLElement const * element, std::vector & members ); + void readStructMemberEnum( tinyxml2::XMLElement const * element, MemberData & memberData ); + void readStructMemberName( tinyxml2::XMLElement const * element, + MemberData & memberData, + std::vector const & members ); + void readStructMemberType( tinyxml2::XMLElement const * element, MemberData & memberData ); + void readTag( tinyxml2::XMLElement const * element ); + void readTags( tinyxml2::XMLElement const * element ); + void readType( tinyxml2::XMLElement const * element ); + void readTypeEnum( tinyxml2::XMLElement const * element, std::map const & attributes ); + void readTypeInclude( tinyxml2::XMLElement const * element, std::map const & attributes ); + void readTypes( tinyxml2::XMLElement const * element ); + void registerDeleter( std::string const & name, std::pair const & commandData ); + void setVulkanLicenseHeader( int line, std::string const & comment ); + std::string toString( TypeCategory category ); private: std::map m_baseTypes; diff --git a/samples/01_InitInstance/01_InitInstance.cpp b/samples/01_InitInstance/01_InitInstance.cpp index 7531e1c..77b6553 100644 --- a/samples/01_InitInstance/01_InitInstance.cpp +++ b/samples/01_InitInstance/01_InitInstance.cpp @@ -16,38 +16,39 @@ // Create and destroy a vk::UniqueInstance #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "01_InitInstance"; 
-static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "01_InitInstance"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { /* VULKAN_HPP_KEY_START */ try { // initialize the vk::ApplicationInfo structure - vk::ApplicationInfo applicationInfo(AppName, 1, EngineName, 1, VK_API_VERSION_1_1); + vk::ApplicationInfo applicationInfo( AppName, 1, EngineName, 1, VK_API_VERSION_1_1 ); // initialize the vk::InstanceCreateInfo - vk::InstanceCreateInfo instanceCreateInfo({}, &applicationInfo); + vk::InstanceCreateInfo instanceCreateInfo( {}, &applicationInfo ); // create a UniqueInstance - vk::UniqueInstance instance = vk::createInstanceUnique(instanceCreateInfo); + vk::UniqueInstance instance = vk::createInstanceUnique( instanceCreateInfo ); // Note: No need to explicitly destroy the instance, as the corresponding destroy function is // called by the destructor of the UniqueInstance on leaving this scope. } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } /* VULKAN_HPP_KEY_END */ diff --git a/samples/02_EnumerateDevices/02_EnumerateDevices.cpp b/samples/02_EnumerateDevices/02_EnumerateDevices.cpp index 6403f24..59fa74e 100644 --- a/samples/02_EnumerateDevices/02_EnumerateDevices.cpp +++ b/samples/02_EnumerateDevices/02_EnumerateDevices.cpp @@ -17,18 +17,19 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "02_EnumerateDevices"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "02_EnumerateDevices"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif /* VULKAN_HPP_KEY_START */ @@ -36,20 +37,20 @@ int main(int /*argc*/, char ** /*argv*/) // enumerate the physicalDevices vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - // Note: PhysicalDevices are not created, but just enumerated. Therefore, there is nothing like a UniquePhysicalDevice. - // A PhysicalDevice is unique by definition, and there's no need to destroy it. + // Note: PhysicalDevices are not created, but just enumerated. Therefore, there is nothing like a + // UniquePhysicalDevice. A PhysicalDevice is unique by definition, and there's no need to destroy it. /* VULKAN_HPP_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/03_InitDevice/03_InitDevice.cpp b/samples/03_InitDevice/03_InitDevice.cpp index 28d5597..8bd283f 100644 --- a/samples/03_InitDevice/03_InitDevice.cpp +++ b/samples/03_InitDevice/03_InitDevice.cpp @@ -17,18 +17,19 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "03_InitDevice"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "03_InitDevice"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); @@ -39,31 +40,35 @@ int main(int /*argc*/, char ** /*argv*/) std::vector queueFamilyProperties = physicalDevice.getQueueFamilyProperties(); // get the first index into queueFamiliyProperties which supports graphics - size_t graphicsQueueFamilyIndex = std::distance(queueFamilyProperties.begin(), - std::find_if(queueFamilyProperties.begin(), - queueFamilyProperties.end(), - [](vk::QueueFamilyProperties const& qfp) { return qfp.queueFlags & vk::QueueFlagBits::eGraphics; })); - assert(graphicsQueueFamilyIndex < queueFamilyProperties.size()); + size_t graphicsQueueFamilyIndex = std::distance( + queueFamilyProperties.begin(), + std::find_if( + queueFamilyProperties.begin(), queueFamilyProperties.end(), []( vk::QueueFamilyProperties const & qfp ) { + return qfp.queueFlags & vk::QueueFlagBits::eGraphics; + } ) ); + assert( graphicsQueueFamilyIndex < queueFamilyProperties.size() ); // create a UniqueDevice - float queuePriority = 0.0f; - vk::DeviceQueueCreateInfo deviceQueueCreateInfo(vk::DeviceQueueCreateFlags(), static_cast(graphicsQueueFamilyIndex), 1, &queuePriority); - vk::UniqueDevice device = physicalDevice.createDeviceUnique(vk::DeviceCreateInfo(vk::DeviceCreateFlags(), 1, &deviceQueueCreateInfo)); + float queuePriority = 0.0f; + vk::DeviceQueueCreateInfo deviceQueueCreateInfo( + vk::DeviceQueueCreateFlags(), static_cast( graphicsQueueFamilyIndex ), 1, &queuePriority ); + vk::UniqueDevice device = + physicalDevice.createDeviceUnique( vk::DeviceCreateInfo( vk::DeviceCreateFlags(), 1, &deviceQueueCreateInfo ) ); // Note: No need to explicitly destroy the device, as the corresponding destroy function is // called by the destructor of the UniqueDevice on leaving this scope. /* VULKAN_HPP_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/04_InitCommandBuffer/04_InitCommandBuffer.cpp b/samples/04_InitCommandBuffer/04_InitCommandBuffer.cpp index dcbba5d..7c6df8e 100644 --- a/samples/04_InitCommandBuffer/04_InitCommandBuffer.cpp +++ b/samples/04_InitCommandBuffer/04_InitCommandBuffer.cpp @@ -17,47 +17,54 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "04_InitCommandBuffer"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "04_InitCommandBuffer"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - uint32_t graphicsQueueFamilyIndex = vk::su::findGraphicsQueueFamilyIndex(physicalDevice.getQueueFamilyProperties()); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsQueueFamilyIndex); + uint32_t graphicsQueueFamilyIndex = + vk::su::findGraphicsQueueFamilyIndex( physicalDevice.getQueueFamilyProperties() ); + vk::UniqueDevice device = vk::su::createDevice( physicalDevice, graphicsQueueFamilyIndex ); /* VULKAN_HPP_KEY_START */ // create a UniqueCommandPool to allocate a CommandBuffer from - vk::UniqueCommandPool commandPool = device->createCommandPoolUnique(vk::CommandPoolCreateInfo(vk::CommandPoolCreateFlags(), graphicsQueueFamilyIndex)); + vk::UniqueCommandPool commandPool = device->createCommandPoolUnique( + vk::CommandPoolCreateInfo( vk::CommandPoolCreateFlags(), graphicsQueueFamilyIndex ) ); // allocate a CommandBuffer from the CommandPool - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - // Note: No need to explicitly free the CommandBuffer or destroy the CommandPool, as the corresponding free and destroy - // functions are called by the destructor of the UniqueCommandBuffer and the UniqueCommandPool on leaving this scope. + // Note: No need to explicitly free the CommandBuffer or destroy the CommandPool, as the corresponding free and + // destroy functions are called by the destructor of the UniqueCommandBuffer and the UniqueCommandPool on leaving + // this scope. /* VULKAN_HPP_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/05_InitSwapchain/05_InitSwapchain.cpp b/samples/05_InitSwapchain/05_InitSwapchain.cpp index 9f87ffc..7bc7857 100644 --- a/samples/05_InitSwapchain/05_InitSwapchain.cpp +++ b/samples/05_InitSwapchain/05_InitSwapchain.cpp @@ -17,59 +17,66 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "05_InitSwapchain"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "05_InitSwapchain"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); std::vector queueFamilyProperties = physicalDevice.getQueueFamilyProperties(); - uint32_t graphicsQueueFamilyIndex = vk::su::findGraphicsQueueFamilyIndex(queueFamilyProperties); + uint32_t graphicsQueueFamilyIndex = vk::su::findGraphicsQueueFamilyIndex( queueFamilyProperties ); /* VULKAN_HPP_KEY_START */ - uint32_t width = 64; - uint32_t height = 64; - vk::su::WindowData window = vk::su::createWindow(AppName, {width, height}); + uint32_t width = 64; + uint32_t height = 64; + vk::su::WindowData window = vk::su::createWindow( AppName, { width, height } ); vk::UniqueSurfaceKHR surface; { VkSurfaceKHR _surface; - glfwCreateWindowSurface(VkInstance(instance.get()), window.handle, nullptr, &_surface); - vk::ObjectDestroy _deleter(instance.get()); - surface = vk::UniqueSurfaceKHR(vk::SurfaceKHR(_surface), _deleter); + glfwCreateWindowSurface( VkInstance( instance.get() ), window.handle, nullptr, &_surface ); + vk::ObjectDestroy _deleter( instance.get() ); + surface = vk::UniqueSurfaceKHR( vk::SurfaceKHR( _surface ), _deleter ); } // determine a queueFamilyIndex that suports present // first check if the graphicsQueueFamiliyIndex is good enough - size_t presentQueueFamilyIndex = physicalDevice.getSurfaceSupportKHR(static_cast(graphicsQueueFamilyIndex), surface.get()) ? graphicsQueueFamilyIndex : queueFamilyProperties.size(); - if (presentQueueFamilyIndex == queueFamilyProperties.size()) + size_t presentQueueFamilyIndex = + physicalDevice.getSurfaceSupportKHR( static_cast( graphicsQueueFamilyIndex ), surface.get() ) + ? 
graphicsQueueFamilyIndex + : queueFamilyProperties.size(); + if ( presentQueueFamilyIndex == queueFamilyProperties.size() ) { - // the graphicsQueueFamilyIndex doesn't support present -> look for another family index that supports both graphics and present - for (size_t i = 0; i < queueFamilyProperties.size(); i++) + // the graphicsQueueFamilyIndex doesn't support present -> look for another family index that supports both + // graphics and present + for ( size_t i = 0; i < queueFamilyProperties.size(); i++ ) { - if ((queueFamilyProperties[i].queueFlags & vk::QueueFlagBits::eGraphics) && physicalDevice.getSurfaceSupportKHR(static_cast<uint32_t>(i), surface.get())) + if ( ( queueFamilyProperties[i].queueFlags & vk::QueueFlagBits::eGraphics ) && + physicalDevice.getSurfaceSupportKHR( static_cast<uint32_t>( i ), surface.get() ) ) { - graphicsQueueFamilyIndex = vk::su::checked_cast<uint32_t>(i); - presentQueueFamilyIndex = i; + graphicsQueueFamilyIndex = vk::su::checked_cast<uint32_t>( i ); + presentQueueFamilyIndex = i; break; } } - if (presentQueueFamilyIndex == queueFamilyProperties.size()) + if ( presentQueueFamilyIndex == queueFamilyProperties.size() ) { - // there's nothing like a single family index that supports both graphics and present -> look for another family index that supports present - for (size_t i = 0; i < queueFamilyProperties.size(); i++) + // there's nothing like a single family index that supports both graphics and present -> look for another + // family index that supports present + for ( size_t i = 0; i < queueFamilyProperties.size(); i++ ) { - if (physicalDevice.getSurfaceSupportKHR(static_cast<uint32_t>(i), surface.get())) + if ( physicalDevice.getSurfaceSupportKHR( static_cast<uint32_t>( i ), surface.get() ) ) { presentQueueFamilyIndex = i; break; @@ -77,26 +84,31 @@ int main(int /*argc*/, char ** /*argv*/) } } } - if ((graphicsQueueFamilyIndex == queueFamilyProperties.size()) || (presentQueueFamilyIndex == queueFamilyProperties.size())) + if ( ( graphicsQueueFamilyIndex == queueFamilyProperties.size() ) || + ( presentQueueFamilyIndex == queueFamilyProperties.size() ) ) { - throw std::runtime_error("Could not find a queue for graphics or present -> terminating"); + throw std::runtime_error( "Could not find a queue for graphics or present -> terminating" ); } // create a device - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsQueueFamilyIndex, vk::su::getDeviceExtensions()); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsQueueFamilyIndex, vk::su::getDeviceExtensions() ); // get the supported VkFormats - std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR(surface.get()); - assert(!formats.empty()); - vk::Format format = (formats[0].format == vk::Format::eUndefined) ? vk::Format::eB8G8R8A8Unorm : formats[0].format; + std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface.get() ); + assert( !formats.empty() ); + vk::Format format = + ( formats[0].format == vk::Format::eUndefined ) ? vk::Format::eB8G8R8A8Unorm : formats[0].format; - vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevice.getSurfaceCapabilitiesKHR(surface.get()); - VkExtent2D swapchainExtent; - if (surfaceCapabilities.currentExtent.width == std::numeric_limits<uint32_t>::max()) + vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevice.getSurfaceCapabilitiesKHR( surface.get() ); + VkExtent2D swapchainExtent; + if ( surfaceCapabilities.currentExtent.width == std::numeric_limits<uint32_t>::max() ) { // If the surface size is undefined, the size is set to the size of the images requested.
- swapchainExtent.width = vk::su::clamp(width, surfaceCapabilities.minImageExtent.width, surfaceCapabilities.maxImageExtent.width); - swapchainExtent.height = vk::su::clamp(height, surfaceCapabilities.minImageExtent.height, surfaceCapabilities.maxImageExtent.height); + swapchainExtent.width = + vk::su::clamp( width, surfaceCapabilities.minImageExtent.width, surfaceCapabilities.maxImageExtent.width ); + swapchainExtent.height = + vk::su::clamp( height, surfaceCapabilities.minImageExtent.height, surfaceCapabilities.maxImageExtent.height ); } else { @@ -107,38 +119,63 @@ int main(int /*argc*/, char ** /*argv*/) // The FIFO present mode is guaranteed by the spec to be supported vk::PresentModeKHR swapchainPresentMode = vk::PresentModeKHR::eFifo; - vk::SurfaceTransformFlagBitsKHR preTransform = (surfaceCapabilities.supportedTransforms & vk::SurfaceTransformFlagBitsKHR::eIdentity) ? vk::SurfaceTransformFlagBitsKHR::eIdentity : surfaceCapabilities.currentTransform; + vk::SurfaceTransformFlagBitsKHR preTransform = + ( surfaceCapabilities.supportedTransforms & vk::SurfaceTransformFlagBitsKHR::eIdentity ) + ? vk::SurfaceTransformFlagBitsKHR::eIdentity + : surfaceCapabilities.currentTransform; vk::CompositeAlphaFlagBitsKHR compositeAlpha = - (surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::ePreMultiplied) ? vk::CompositeAlphaFlagBitsKHR::ePreMultiplied : - (surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::ePostMultiplied) ? vk::CompositeAlphaFlagBitsKHR::ePostMultiplied : - (surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::eInherit) ? vk::CompositeAlphaFlagBitsKHR::eInherit : vk::CompositeAlphaFlagBitsKHR::eOpaque; + ( surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::ePreMultiplied ) + ? vk::CompositeAlphaFlagBitsKHR::ePreMultiplied + : ( surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::ePostMultiplied ) + ? vk::CompositeAlphaFlagBitsKHR::ePostMultiplied + : ( surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::eInherit ) + ? 
vk::CompositeAlphaFlagBitsKHR::eInherit + : vk::CompositeAlphaFlagBitsKHR::eOpaque; - vk::SwapchainCreateInfoKHR swapChainCreateInfo(vk::SwapchainCreateFlagsKHR(), surface.get(), surfaceCapabilities.minImageCount, format, vk::ColorSpaceKHR::eSrgbNonlinear, - swapchainExtent, 1, vk::ImageUsageFlagBits::eColorAttachment, vk::SharingMode::eExclusive, 0, nullptr, preTransform, compositeAlpha, swapchainPresentMode, true, nullptr); + vk::SwapchainCreateInfoKHR swapChainCreateInfo( vk::SwapchainCreateFlagsKHR(), + surface.get(), + surfaceCapabilities.minImageCount, + format, + vk::ColorSpaceKHR::eSrgbNonlinear, + swapchainExtent, + 1, + vk::ImageUsageFlagBits::eColorAttachment, + vk::SharingMode::eExclusive, + 0, + nullptr, + preTransform, + compositeAlpha, + swapchainPresentMode, + true, + nullptr ); - uint32_t queueFamilyIndices[2] = { static_cast(graphicsQueueFamilyIndex), static_cast(presentQueueFamilyIndex) }; - if (graphicsQueueFamilyIndex != presentQueueFamilyIndex) + uint32_t queueFamilyIndices[2] = { static_cast( graphicsQueueFamilyIndex ), + static_cast( presentQueueFamilyIndex ) }; + if ( graphicsQueueFamilyIndex != presentQueueFamilyIndex ) { - // If the graphics and present queues are from different queue families, we either have to explicitly transfer ownership of images between - // the queues, or we have to create the swapchain with imageSharingMode as VK_SHARING_MODE_CONCURRENT - swapChainCreateInfo.imageSharingMode = vk::SharingMode::eConcurrent; + // If the graphics and present queues are from different queue families, we either have to explicitly transfer + // ownership of images between the queues, or we have to create the swapchain with imageSharingMode as + // VK_SHARING_MODE_CONCURRENT + swapChainCreateInfo.imageSharingMode = vk::SharingMode::eConcurrent; swapChainCreateInfo.queueFamilyIndexCount = 2; - swapChainCreateInfo.pQueueFamilyIndices = queueFamilyIndices; + swapChainCreateInfo.pQueueFamilyIndices = queueFamilyIndices; } - vk::UniqueSwapchainKHR swapChain = device->createSwapchainKHRUnique(swapChainCreateInfo); + vk::UniqueSwapchainKHR swapChain = device->createSwapchainKHRUnique( swapChainCreateInfo ); - std::vector swapChainImages = device->getSwapchainImagesKHR(swapChain.get()); + std::vector swapChainImages = device->getSwapchainImagesKHR( swapChain.get() ); std::vector imageViews; - imageViews.reserve(swapChainImages.size()); - vk::ComponentMapping componentMapping(vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eG, vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eA); - vk::ImageSubresourceRange subResourceRange(vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1); - for (auto image : swapChainImages) + imageViews.reserve( swapChainImages.size() ); + vk::ComponentMapping componentMapping( + vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eG, vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eA ); + vk::ImageSubresourceRange subResourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ); + for ( auto image : swapChainImages ) { - vk::ImageViewCreateInfo imageViewCreateInfo(vk::ImageViewCreateFlags(), image, vk::ImageViewType::e2D, format, componentMapping, subResourceRange); - imageViews.push_back(device->createImageViewUnique(imageViewCreateInfo)); + vk::ImageViewCreateInfo imageViewCreateInfo( + vk::ImageViewCreateFlags(), image, vk::ImageViewType::e2D, format, componentMapping, subResourceRange ); + imageViews.push_back( device->createImageViewUnique( imageViewCreateInfo ) ); } // Note: No need to explicitly destroy the ImageViews or the swapChain, as the 
corresponding destroy @@ -146,20 +183,20 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_HPP_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/06_InitDepthBuffer/06_InitDepthBuffer.cpp b/samples/06_InitDepthBuffer/06_InitDepthBuffer.cpp index 76e8f03..274850c 100644 --- a/samples/06_InitDepthBuffer/06_InitDepthBuffer.cpp +++ b/samples/06_InitDepthBuffer/06_InitDepthBuffer.cpp @@ -17,86 +17,106 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "06_InitDepthBuffer"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "06_InitDepthBuffer"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); /* VULKAN_HPP_KEY_START */ - const vk::Format depthFormat = vk::Format::eD16Unorm; - vk::FormatProperties formatProperties = physicalDevice.getFormatProperties(depthFormat); + const vk::Format depthFormat = vk::Format::eD16Unorm; + vk::FormatProperties formatProperties = physicalDevice.getFormatProperties( depthFormat ); vk::ImageTiling tiling; - if (formatProperties.linearTilingFeatures & vk::FormatFeatureFlagBits::eDepthStencilAttachment) + if ( formatProperties.linearTilingFeatures & vk::FormatFeatureFlagBits::eDepthStencilAttachment ) { tiling = vk::ImageTiling::eLinear; } - else if (formatProperties.optimalTilingFeatures & vk::FormatFeatureFlagBits::eDepthStencilAttachment) + else if ( formatProperties.optimalTilingFeatures & vk::FormatFeatureFlagBits::eDepthStencilAttachment ) { tiling = vk::ImageTiling::eOptimal; } else { - throw std::runtime_error("DepthStencilAttachment is not supported for D16Unorm depth format."); + throw std::runtime_error( "DepthStencilAttachment is not supported for D16Unorm depth format." 
); } - vk::ImageCreateInfo imageCreateInfo(vk::ImageCreateFlags(), vk::ImageType::e2D, depthFormat, vk::Extent3D(surfaceData.extent, 1), 1, 1, vk::SampleCountFlagBits::e1, tiling, vk::ImageUsageFlagBits::eDepthStencilAttachment); - vk::UniqueImage depthImage = device->createImageUnique(imageCreateInfo); + vk::ImageCreateInfo imageCreateInfo( vk::ImageCreateFlags(), + vk::ImageType::e2D, + depthFormat, + vk::Extent3D( surfaceData.extent, 1 ), + 1, + 1, + vk::SampleCountFlagBits::e1, + tiling, + vk::ImageUsageFlagBits::eDepthStencilAttachment ); + vk::UniqueImage depthImage = device->createImageUnique( imageCreateInfo ); - vk::PhysicalDeviceMemoryProperties memoryProperties = physicalDevice.getMemoryProperties(); - vk::MemoryRequirements memoryRequirements = device->getImageMemoryRequirements(depthImage.get()); - uint32_t typeBits = memoryRequirements.memoryTypeBits; - uint32_t typeIndex = uint32_t(~0); - for (uint32_t i = 0; i < memoryProperties.memoryTypeCount; i++) + vk::PhysicalDeviceMemoryProperties memoryProperties = physicalDevice.getMemoryProperties(); + vk::MemoryRequirements memoryRequirements = device->getImageMemoryRequirements( depthImage.get() ); + uint32_t typeBits = memoryRequirements.memoryTypeBits; + uint32_t typeIndex = uint32_t( ~0 ); + for ( uint32_t i = 0; i < memoryProperties.memoryTypeCount; i++ ) { - if ((typeBits & 1) && ((memoryProperties.memoryTypes[i].propertyFlags & vk::MemoryPropertyFlagBits::eDeviceLocal) == vk::MemoryPropertyFlagBits::eDeviceLocal)) + if ( ( typeBits & 1 ) && + ( ( memoryProperties.memoryTypes[i].propertyFlags & vk::MemoryPropertyFlagBits::eDeviceLocal ) == + vk::MemoryPropertyFlagBits::eDeviceLocal ) ) { typeIndex = i; break; } typeBits >>= 1; } - assert(typeIndex != ~0); - vk::UniqueDeviceMemory depthMemory = device->allocateMemoryUnique(vk::MemoryAllocateInfo(memoryRequirements.size, typeIndex)); + assert( typeIndex != ~0 ); + vk::UniqueDeviceMemory depthMemory = + device->allocateMemoryUnique( vk::MemoryAllocateInfo( memoryRequirements.size, typeIndex ) ); - device->bindImageMemory(depthImage.get(), depthMemory.get(), 0); + device->bindImageMemory( depthImage.get(), depthMemory.get(), 0 ); - vk::ComponentMapping componentMapping(vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eG, vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eA); - vk::ImageSubresourceRange subResourceRange(vk::ImageAspectFlagBits::eDepth, 0, 1, 0, 1); - vk::UniqueImageView depthView = device->createImageViewUnique(vk::ImageViewCreateInfo(vk::ImageViewCreateFlags(), depthImage.get(), vk::ImageViewType::e2D, depthFormat, componentMapping, subResourceRange)); + vk::ComponentMapping componentMapping( + vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eG, vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eA ); + vk::ImageSubresourceRange subResourceRange( vk::ImageAspectFlagBits::eDepth, 0, 1, 0, 1 ); + vk::UniqueImageView depthView = device->createImageViewUnique( vk::ImageViewCreateInfo( vk::ImageViewCreateFlags(), + depthImage.get(), + vk::ImageViewType::e2D, + depthFormat, + componentMapping, + subResourceRange ) ); /* VULKAN_HPP_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/07_InitUniformBuffer/07_InitUniformBuffer.cpp b/samples/07_InitUniformBuffer/07_InitUniformBuffer.cpp index 27ac386..3b02e64 100644 --- a/samples/07_InitUniformBuffer/07_InitUniformBuffer.cpp +++ b/samples/07_InitUniformBuffer/07_InitUniformBuffer.cpp @@ -17,67 +17,92 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include #define GLM_FORCE_RADIANS -#pragma warning(disable:4201) // disable warning C4201: nonstandard extension used: nameless struct/union; needed to get glm/detail/type_vec?.hpp without warnings +#pragma warning( disable : 4201 ) // disable warning C4201: nonstandard extension used: nameless struct/union; needed + // to get glm/detail/type_vec?.hpp without warnings #include -static char const* AppName = "07_InitUniformBuffer"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "07_InitUniformBuffer"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, vk::su::findGraphicsQueueFamilyIndex(physicalDevice.getQueueFamilyProperties())); + vk::UniqueDevice device = vk::su::createDevice( + physicalDevice, vk::su::findGraphicsQueueFamilyIndex( physicalDevice.getQueueFamilyProperties() ) ); /* VULKAN_HPP_KEY_START */ - glm::mat4x4 model = glm::mat4x4(1.0f); - glm::mat4x4 view = glm::lookAt(glm::vec3(-5.0f, 3.0f, -10.0f), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, -1.0f, 0.0f)); - glm::mat4x4 projection = glm::perspective(glm::radians(45.0f), 1.0f, 0.1f, 100.0f); - glm::mat4x4 clip = glm::mat4x4(1.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.5f, 0.0f, 0.0f, 0.0f, 0.5f, 1.0f); // vulkan clip space has inverted y and half z ! - glm::mat4x4 mvpc = clip * projection * view * model; + glm::mat4x4 model = glm::mat4x4( 1.0f ); + glm::mat4x4 view = + glm::lookAt( glm::vec3( -5.0f, 3.0f, -10.0f ), glm::vec3( 0.0f, 0.0f, 0.0f ), glm::vec3( 0.0f, -1.0f, 0.0f ) ); + glm::mat4x4 projection = glm::perspective( glm::radians( 45.0f ), 1.0f, 0.1f, 100.0f ); + glm::mat4x4 clip = glm::mat4x4( 1.0f, + 0.0f, + 0.0f, + 0.0f, + 0.0f, + -1.0f, + 0.0f, + 0.0f, + 0.0f, + 0.0f, + 0.5f, + 0.0f, + 0.0f, + 0.0f, + 0.5f, + 1.0f ); // vulkan clip space has inverted y and half z ! 
+ glm::mat4x4 mvpc = clip * projection * view * model; - vk::UniqueBuffer uniformDataBuffer = device->createBufferUnique(vk::BufferCreateInfo(vk::BufferCreateFlags(), sizeof(mvpc), vk::BufferUsageFlagBits::eUniformBuffer)); + vk::UniqueBuffer uniformDataBuffer = device->createBufferUnique( + vk::BufferCreateInfo( vk::BufferCreateFlags(), sizeof( mvpc ), vk::BufferUsageFlagBits::eUniformBuffer ) ); - vk::MemoryRequirements memoryRequirements = device->getBufferMemoryRequirements(uniformDataBuffer.get()); - uint32_t typeIndex = vk::su::findMemoryType(physicalDevice.getMemoryProperties(), memoryRequirements.memoryTypeBits, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); - vk::UniqueDeviceMemory uniformDataMemory = device->allocateMemoryUnique(vk::MemoryAllocateInfo(memoryRequirements.size, typeIndex)); + vk::MemoryRequirements memoryRequirements = device->getBufferMemoryRequirements( uniformDataBuffer.get() ); + uint32_t typeIndex = + vk::su::findMemoryType( physicalDevice.getMemoryProperties(), + memoryRequirements.memoryTypeBits, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent ); + vk::UniqueDeviceMemory uniformDataMemory = + device->allocateMemoryUnique( vk::MemoryAllocateInfo( memoryRequirements.size, typeIndex ) ); - uint8_t* pData = static_cast(device->mapMemory(uniformDataMemory.get(), 0, memoryRequirements.size)); - memcpy(pData, &mvpc, sizeof(mvpc)); - device->unmapMemory(uniformDataMemory.get()); + uint8_t * pData = + static_cast( device->mapMemory( uniformDataMemory.get(), 0, memoryRequirements.size ) ); + memcpy( pData, &mvpc, sizeof( mvpc ) ); + device->unmapMemory( uniformDataMemory.get() ); - device->bindBufferMemory(uniformDataBuffer.get(), uniformDataMemory.get(), 0); + device->bindBufferMemory( uniformDataBuffer.get(), uniformDataMemory.get(), 0 ); // Note: No need to explicitly destroy the memory or the buffer, as the corresponding destroy function is // called by the destructor of the UniqueMemory or UniqueBuffer, respectively, on leaving this scope. /* VULKAN_HPP_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/08_InitPipelineLayout/08_InitPipelineLayout.cpp b/samples/08_InitPipelineLayout/08_InitPipelineLayout.cpp index 5b625a7..1f525cc 100644 --- a/samples/08_InitPipelineLayout/08_InitPipelineLayout.cpp +++ b/samples/08_InitPipelineLayout/08_InitPipelineLayout.cpp @@ -17,52 +17,58 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "08_InitPipelineLayout"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "08_InitPipelineLayout"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, vk::su::findGraphicsQueueFamilyIndex(physicalDevice.getQueueFamilyProperties())); + vk::UniqueDevice device = vk::su::createDevice( + physicalDevice, vk::su::findGraphicsQueueFamilyIndex( physicalDevice.getQueueFamilyProperties() ) ); /* VULKAN_HPP_KEY_START */ // create a DescriptorSetLayout - vk::DescriptorSetLayoutBinding descriptorSetLayoutBinding(0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex); - vk::UniqueDescriptorSetLayout descriptorSetLayout = device->createDescriptorSetLayoutUnique(vk::DescriptorSetLayoutCreateInfo(vk::DescriptorSetLayoutCreateFlags(), 1, &descriptorSetLayoutBinding)); + vk::DescriptorSetLayoutBinding descriptorSetLayoutBinding( + 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex ); + vk::UniqueDescriptorSetLayout descriptorSetLayout = device->createDescriptorSetLayoutUnique( + vk::DescriptorSetLayoutCreateInfo( vk::DescriptorSetLayoutCreateFlags(), 1, &descriptorSetLayoutBinding ) ); // create a PipelineLayout using that DescriptorSetLayout - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) ); // Note: No need to explicitly destroy the layouts, as the corresponding destroy function is - // called by the destructor of the UniqueDescriptorSetLayout or UniquePipelineLayout, respectively, on leaving this scope. + // called by the destructor of the UniqueDescriptorSetLayout or UniquePipelineLayout, respectively, on leaving this + // scope. /* VULKAN_HPP_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/09_InitDescriptorSet/09_InitDescriptorSet.cpp b/samples/09_InitDescriptorSet/09_InitDescriptorSet.cpp index 0636692..fc79661 100644 --- a/samples/09_InitDescriptorSet/09_InitDescriptorSet.cpp +++ b/samples/09_InitDescriptorSet/09_InitDescriptorSet.cpp @@ -18,61 +18,73 @@ #include "../utils/math.hpp" #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include #define GLM_FORCE_RADIANS -#pragma warning(disable:4201) // disable warning C4201: nonstandard extension used: nameless struct/union; needed to get glm/detail/type_vec?.hpp without warnings +#pragma warning( disable : 4201 ) // disable warning C4201: nonstandard extension used: nameless struct/union; needed + // to get glm/detail/type_vec?.hpp without warnings #include -static char const* AppName = "09_InitDescriptorSet"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "09_InitDescriptorSet"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, vk::su::findGraphicsQueueFamilyIndex(physicalDevice.getQueueFamilyProperties())); + vk::UniqueDevice device = vk::su::createDevice( + physicalDevice, vk::su::findGraphicsQueueFamilyIndex( physicalDevice.getQueueFamilyProperties() ) ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(vk::Extent2D(0, 0))); + vk::su::BufferData uniformBufferData( + physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( + device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( vk::Extent2D( 0, 0 ) ) ); - vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, { {vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex} }); + vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout( + device, { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex } } ); /* VULKAN_HPP_KEY_START */ // create a descriptor pool - vk::DescriptorPoolSize poolSize(vk::DescriptorType::eUniformBuffer, 1); - vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique(vk::DescriptorPoolCreateInfo(vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, 1, &poolSize)); + vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 1 ); + vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique( + vk::DescriptorPoolCreateInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, 1, &poolSize ) ); // allocate a descriptor set - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 
1, &*descriptorSetLayout)).front()); + vk::UniqueDescriptorSet descriptorSet = std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); - vk::DescriptorBufferInfo descriptorBufferInfo(uniformBufferData.buffer.get(), 0, sizeof(glm::mat4x4)); - device->updateDescriptorSets(vk::WriteDescriptorSet(descriptorSet.get(), 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &descriptorBufferInfo), {}); + vk::DescriptorBufferInfo descriptorBufferInfo( uniformBufferData.buffer.get(), 0, sizeof( glm::mat4x4 ) ); + device->updateDescriptorSets( + vk::WriteDescriptorSet( + descriptorSet.get(), 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &descriptorBufferInfo ), + {} ); /* VULKAN_HPP_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/10_InitRenderPass/10_InitRenderPass.cpp b/samples/10_InitRenderPass/10_InitRenderPass.cpp index 9cb52ca..534e06a 100644 --- a/samples/10_InitRenderPass/10_InitRenderPass.cpp +++ b/samples/10_InitRenderPass/10_InitRenderPass.cpp @@ -17,67 +17,94 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include #define GLM_FORCE_RADIANS -#pragma warning(disable:4201) // disable warning C4201: nonstandard extension used: nameless struct/union; needed to get glm/detail/type_vec?.hpp without warnings +#pragma warning( disable : 4201 ) // disable warning C4201: nonstandard extension used: nameless struct/union; needed + // to get glm/detail/type_vec?.hpp without warnings #include -static char const* AppName = "10_InitRenderPass"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "10_InitRenderPass"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(64, 64)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 64, 64 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::Format colorFormat = 
vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format; + vk::Format colorFormat = + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format; vk::Format depthFormat = vk::Format::eD16Unorm; /* VULKAN_HPP_KEY_START */ vk::AttachmentDescription attachmentDescriptions[2]; - attachmentDescriptions[0] = vk::AttachmentDescription(vk::AttachmentDescriptionFlags(), colorFormat, vk::SampleCountFlagBits::e1, vk::AttachmentLoadOp::eClear, - vk::AttachmentStoreOp::eStore, vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare, vk::ImageLayout::eUndefined, vk::ImageLayout::ePresentSrcKHR); - attachmentDescriptions[1] = vk::AttachmentDescription(vk::AttachmentDescriptionFlags(), depthFormat, vk::SampleCountFlagBits::e1, vk::AttachmentLoadOp::eClear, - vk::AttachmentStoreOp::eDontCare, vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare, vk::ImageLayout::eUndefined, vk::ImageLayout::eDepthStencilAttachmentOptimal); + attachmentDescriptions[0] = vk::AttachmentDescription( vk::AttachmentDescriptionFlags(), + colorFormat, + vk::SampleCountFlagBits::e1, + vk::AttachmentLoadOp::eClear, + vk::AttachmentStoreOp::eStore, + vk::AttachmentLoadOp::eDontCare, + vk::AttachmentStoreOp::eDontCare, + vk::ImageLayout::eUndefined, + vk::ImageLayout::ePresentSrcKHR ); + attachmentDescriptions[1] = vk::AttachmentDescription( vk::AttachmentDescriptionFlags(), + depthFormat, + vk::SampleCountFlagBits::e1, + vk::AttachmentLoadOp::eClear, + vk::AttachmentStoreOp::eDontCare, + vk::AttachmentLoadOp::eDontCare, + vk::AttachmentStoreOp::eDontCare, + vk::ImageLayout::eUndefined, + vk::ImageLayout::eDepthStencilAttachmentOptimal ); - vk::AttachmentReference colorReference(0, vk::ImageLayout::eColorAttachmentOptimal); - vk::AttachmentReference depthReference(1, vk::ImageLayout::eDepthStencilAttachmentOptimal); - vk::SubpassDescription subpass(vk::SubpassDescriptionFlags(), vk::PipelineBindPoint::eGraphics, 0, nullptr, 1, &colorReference, nullptr, &depthReference); + vk::AttachmentReference colorReference( 0, vk::ImageLayout::eColorAttachmentOptimal ); + vk::AttachmentReference depthReference( 1, vk::ImageLayout::eDepthStencilAttachmentOptimal ); + vk::SubpassDescription subpass( vk::SubpassDescriptionFlags(), + vk::PipelineBindPoint::eGraphics, + 0, + nullptr, + 1, + &colorReference, + nullptr, + &depthReference ); - vk::UniqueRenderPass renderPass = device->createRenderPassUnique(vk::RenderPassCreateInfo(vk::RenderPassCreateFlags(), 2, attachmentDescriptions, 1, &subpass)); + vk::UniqueRenderPass renderPass = device->createRenderPassUnique( + vk::RenderPassCreateInfo( vk::RenderPassCreateFlags(), 2, attachmentDescriptions, 1, &subpass ) ); // Note: No need to explicitly destroy the RenderPass or the Semaphore, as the corresponding destroy // functions are called by the destructor of the UniqueRenderPass and the UniqueSemaphore on leaving this scope. /* VULKAN_HPP_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/11_InitShaders/11_InitShaders.cpp b/samples/11_InitShaders/11_InitShaders.cpp index e8d6e6c..8d4ebc7 100644 --- a/samples/11_InitShaders/11_InitShaders.cpp +++ b/samples/11_InitShaders/11_InitShaders.cpp @@ -17,43 +17,47 @@ #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include -static char const* AppName = "11_InitShaders"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "11_InitShaders"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, vk::su::findGraphicsQueueFamilyIndex(physicalDevice.getQueueFamilyProperties())); + vk::UniqueDevice device = vk::su::createDevice( + physicalDevice, vk::su::findGraphicsQueueFamilyIndex( physicalDevice.getQueueFamilyProperties() ) ); /* VULKAN_HPP_KEY_START */ glslang::InitializeProcess(); std::vector vertexShaderSPV; - bool ok = vk::su::GLSLtoSPV(vk::ShaderStageFlagBits::eVertex, vertexShaderText_PC_C, vertexShaderSPV); - assert(ok); + bool ok = vk::su::GLSLtoSPV( vk::ShaderStageFlagBits::eVertex, vertexShaderText_PC_C, vertexShaderSPV ); + assert( ok ); - vk::ShaderModuleCreateInfo vertexShaderModuleCreateInfo(vk::ShaderModuleCreateFlags(), vertexShaderSPV.size() * sizeof(unsigned int), vertexShaderSPV.data()); - vk::UniqueShaderModule vertexShaderModule = device->createShaderModuleUnique(vertexShaderModuleCreateInfo); + vk::ShaderModuleCreateInfo vertexShaderModuleCreateInfo( + vk::ShaderModuleCreateFlags(), vertexShaderSPV.size() * sizeof( unsigned int ), vertexShaderSPV.data() ); + vk::UniqueShaderModule vertexShaderModule = device->createShaderModuleUnique( vertexShaderModuleCreateInfo ); std::vector fragmentShaderSPV; - ok = vk::su::GLSLtoSPV(vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C, fragmentShaderSPV); - assert(ok); + ok = vk::su::GLSLtoSPV( vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C, fragmentShaderSPV ); + assert( ok ); - vk::ShaderModuleCreateInfo fragmentShaderModuleCreateInfo(vk::ShaderModuleCreateFlags(), fragmentShaderSPV.size() * sizeof(unsigned int), fragmentShaderSPV.data()); - vk::UniqueShaderModule fragmentShaderModule = device->createShaderModuleUnique(fragmentShaderModuleCreateInfo); + vk::ShaderModuleCreateInfo fragmentShaderModuleCreateInfo( + vk::ShaderModuleCreateFlags(), fragmentShaderSPV.size() * sizeof( unsigned int ), fragmentShaderSPV.data() ); + vk::UniqueShaderModule fragmentShaderModule = device->createShaderModuleUnique( fragmentShaderModuleCreateInfo ); glslang::FinalizeProcess(); @@ -62,20 +66,20 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_HPP_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { 
std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/12_InitFrameBuffers/12_InitFrameBuffers.cpp b/samples/12_InitFrameBuffers/12_InitFrameBuffers.cpp index 0b8e610..9701407 100644 --- a/samples/12_InitFrameBuffers/12_InitFrameBuffers.cpp +++ b/samples/12_InitFrameBuffers/12_InitFrameBuffers.cpp @@ -17,33 +17,44 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "12_InitFrameBuffers"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "12_InitFrameBuffers"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(64, 64)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 64, 64 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, swapChainData.colorFormat, depthBufferData.format); + vk::UniqueRenderPass renderPass = + vk::su::createRenderPass( device, swapChainData.colorFormat, depthBufferData.format ); /* VULKAN_KEY_START */ @@ -51,31 +62,38 @@ int main(int /*argc*/, char ** /*argv*/) attachments[1] = depthBufferData.imageView.get(); std::vector framebuffers; - 
framebuffers.reserve(swapChainData.imageViews.size()); - for (auto const& view : swapChainData.imageViews) + framebuffers.reserve( swapChainData.imageViews.size() ); + for ( auto const & view : swapChainData.imageViews ) { attachments[0] = view.get(); - framebuffers.push_back(device->createFramebufferUnique(vk::FramebufferCreateInfo(vk::FramebufferCreateFlags(), renderPass.get(), 2, attachments, surfaceData.extent.width, surfaceData.extent.height, 1))); + framebuffers.push_back( device->createFramebufferUnique( vk::FramebufferCreateInfo( vk::FramebufferCreateFlags(), + renderPass.get(), + 2, + attachments, + surfaceData.extent.width, + surfaceData.extent.height, + 1 ) ) ); } - // Note: No need to explicitly destroy the Framebuffers, as the destroy functions are called by the destructor of the UniqueFramebuffer on leaving this scope. + // Note: No need to explicitly destroy the Framebuffers, as the destroy functions are called by the destructor of + // the UniqueFramebuffer on leaving this scope. /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/13_InitVertexBuffer/13_InitVertexBuffer.cpp b/samples/13_InitVertexBuffer/13_InitVertexBuffer.cpp index 47f7782..65a72ff 100644 --- a/samples/13_InitVertexBuffer/13_InitVertexBuffer.cpp +++ b/samples/13_InitVertexBuffer/13_InitVertexBuffer.cpp @@ -18,98 +18,125 @@ #include "../utils/geometries.hpp" #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "13_InitVertexBuffer"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "13_InitVertexBuffer"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(64, 64)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 64, 64 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - 
vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, swapChainData.colorFormat, depthBufferData.format); + vk::UniqueRenderPass renderPass = + vk::su::createRenderPass( device, swapChainData.colorFormat, depthBufferData.format ); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent ); /* VULKAN_KEY_START */ // create a vertex buffer for some vertex and color data - vk::UniqueBuffer vertexBuffer = device->createBufferUnique(vk::BufferCreateInfo(vk::BufferCreateFlags(), sizeof(coloredCubeData), vk::BufferUsageFlagBits::eVertexBuffer)); + vk::UniqueBuffer vertexBuffer = device->createBufferUnique( vk::BufferCreateInfo( + vk::BufferCreateFlags(), sizeof( coloredCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ) ); // allocate device memory for that buffer - vk::MemoryRequirements memoryRequirements = device->getBufferMemoryRequirements(vertexBuffer.get()); - uint32_t memoryTypeIndex = vk::su::findMemoryType(physicalDevice.getMemoryProperties(), memoryRequirements.memoryTypeBits, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); - vk::UniqueDeviceMemory deviceMemory = device->allocateMemoryUnique(vk::MemoryAllocateInfo(memoryRequirements.size, memoryTypeIndex)); + vk::MemoryRequirements memoryRequirements = device->getBufferMemoryRequirements( vertexBuffer.get() ); + uint32_t memoryTypeIndex = + vk::su::findMemoryType( physicalDevice.getMemoryProperties(), + memoryRequirements.memoryTypeBits, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent ); + vk::UniqueDeviceMemory deviceMemory = + device->allocateMemoryUnique( vk::MemoryAllocateInfo( memoryRequirements.size, memoryTypeIndex ) ); // copy the vertex and color data into that device memory - uint8_t *pData = 
static_cast(device->mapMemory(deviceMemory.get(), 0, memoryRequirements.size)); - memcpy(pData, coloredCubeData, sizeof(coloredCubeData)); - device->unmapMemory(deviceMemory.get()); + uint8_t * pData = static_cast( device->mapMemory( deviceMemory.get(), 0, memoryRequirements.size ) ); + memcpy( pData, coloredCubeData, sizeof( coloredCubeData ) ); + device->unmapMemory( deviceMemory.get() ); // and bind the device memory to the vertex buffer - device->bindBufferMemory(vertexBuffer.get(), deviceMemory.get(), 0); + device->bindBufferMemory( vertexBuffer.get(), deviceMemory.get(), 0 ); - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo(vk::SemaphoreCreateFlags())); - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::UniqueSemaphore imageAcquiredSemaphore = + device->createSemaphoreUnique( vk::SemaphoreCreateInfo( vk::SemaphoreCreateFlags() ) ); + vk::ResultValue currentBuffer = device->acquireNextImageKHR( + swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); vk::ClearValue clearValues[2]; - clearValues[0].color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); - commandBuffer->begin(vk::CommandBufferBeginInfo(vk::CommandBufferUsageFlags())); + commandBuffer->begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlags() ) ); - vk::RenderPassBeginInfo renderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 2, clearValues); - commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eInline); - - commandBuffer->bindVertexBuffers(0, *vertexBuffer, {0}); + vk::RenderPassBeginInfo renderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), + 2, + clearValues ); + commandBuffer->beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline ); + + commandBuffer->bindVertexBuffers( 0, *vertexBuffer, { 0 } ); commandBuffer->endRenderPass(); commandBuffer->end(); - vk::su::submitAndWait(device, graphicsQueue, commandBuffer); + vk::su::submitAndWait( device, graphicsQueue, commandBuffer ); - // Note: No need to explicitly destroy the vertexBuffer, deviceMemory, or semaphore, as the destroy functions are called - // by the destructor of the UniqueBuffer, UniqueDeviceMemory, and UniqueSemaphore, respectively, on leaving this scope. + // Note: No need to explicitly destroy the vertexBuffer, deviceMemory, or semaphore, as the destroy functions are + // called by the destructor of the UniqueBuffer, UniqueDeviceMemory, and UniqueSemaphore, respectively, on leaving + // this scope. 
/* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/14_InitPipeline/14_InitPipeline.cpp b/samples/14_InitPipeline/14_InitPipeline.cpp index 3b05a3d..4a5cbf4 100644 --- a/samples/14_InitPipeline/14_InitPipeline.cpp +++ b/samples/14_InitPipeline/14_InitPipeline.cpp @@ -19,55 +19,64 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include -static char const* AppName = "14_InitPipeline"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "14_InitPipeline"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, vk::Format::eD16Unorm); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + vk::Format::eD16Unorm ); - vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, { {vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex} }); - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout( + device, { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex } } ); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, 
&descriptorSetLayout.get() ) ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PC_C); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PC_C ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C ); glslang::FinalizeProcess(); /* VULKAN_KEY_START */ - vk::PipelineShaderStageCreateInfo pipelineShaderStageCreateInfos[2] = - { - vk::PipelineShaderStageCreateInfo(vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eVertex, vertexShaderModule.get(), "main"), - vk::PipelineShaderStageCreateInfo(vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eFragment, fragmentShaderModule.get(), "main") + vk::PipelineShaderStageCreateInfo pipelineShaderStageCreateInfos[2] = { + vk::PipelineShaderStageCreateInfo( + vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eVertex, vertexShaderModule.get(), "main" ), + vk::PipelineShaderStageCreateInfo( + vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eFragment, fragmentShaderModule.get(), "main" ) }; - vk::VertexInputBindingDescription vertexInputBindingDescription(0, sizeof(coloredCubeData[0])); - vk::VertexInputAttributeDescription vertexInputAttributeDescriptions[2] = - { - vk::VertexInputAttributeDescription(0, 0, vk::Format::eR32G32B32A32Sfloat, 0), - vk::VertexInputAttributeDescription(1, 0, vk::Format::eR32G32B32A32Sfloat, 16) + vk::VertexInputBindingDescription vertexInputBindingDescription( 0, sizeof( coloredCubeData[0] ) ); + vk::VertexInputAttributeDescription vertexInputAttributeDescriptions[2] = { + vk::VertexInputAttributeDescription( 0, 0, vk::Format::eR32G32B32A32Sfloat, 0 ), + vk::VertexInputAttributeDescription( 1, 0, vk::Format::eR32G32B32A32Sfloat, 16 ) }; - vk::PipelineVertexInputStateCreateInfo pipelineVertexInputStateCreateInfo - ( + vk::PipelineVertexInputStateCreateInfo pipelineVertexInputStateCreateInfo( vk::PipelineVertexInputStateCreateFlags(), // flags 1, // vertexBindingDescriptionCount &vertexInputBindingDescription, // pVertexBindingDescription @@ -75,12 +84,13 @@ int main(int /*argc*/, char ** /*argv*/) vertexInputAttributeDescriptions // pVertexAttributeDescriptions ); - vk::PipelineInputAssemblyStateCreateInfo pipelineInputAssemblyStateCreateInfo(vk::PipelineInputAssemblyStateCreateFlags(), vk::PrimitiveTopology::eTriangleList); + vk::PipelineInputAssemblyStateCreateInfo pipelineInputAssemblyStateCreateInfo( + vk::PipelineInputAssemblyStateCreateFlags(), vk::PrimitiveTopology::eTriangleList ); - vk::PipelineViewportStateCreateInfo pipelineViewportStateCreateInfo(vk::PipelineViewportStateCreateFlags(), 1, nullptr, 1, nullptr); + vk::PipelineViewportStateCreateInfo pipelineViewportStateCreateInfo( + vk::PipelineViewportStateCreateFlags(), 1, nullptr, 1, nullptr ); - vk::PipelineRasterizationStateCreateInfo pipelineRasterizationStateCreateInfo - ( + vk::PipelineRasterizationStateCreateInfo pipelineRasterizationStateCreateInfo( vk::PipelineRasterizationStateCreateFlags(), // flags false, // depthClampEnable false, // rasterizerDiscardEnable @@ -94,87 +104,85 @@ int main(int /*argc*/, char ** /*argv*/) 1.0f // lineWidth ); - 
vk::PipelineMultisampleStateCreateInfo pipelineMultisampleStateCreateInfo - ( + vk::PipelineMultisampleStateCreateInfo pipelineMultisampleStateCreateInfo( vk::PipelineMultisampleStateCreateFlags(), // flags vk::SampleCountFlagBits::e1 // rasterizationSamples // other values can be default ); - vk::StencilOpState stencilOpState(vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::CompareOp::eAlways); - vk::PipelineDepthStencilStateCreateInfo pipelineDepthStencilStateCreateInfo - ( - vk::PipelineDepthStencilStateCreateFlags(), // flags - true, // depthTestEnable - true, // depthWriteEnable - vk::CompareOp::eLessOrEqual, // depthCompareOp - false, // depthBoundTestEnable - false, // stencilTestEnable - stencilOpState, // front - stencilOpState // back + vk::StencilOpState stencilOpState( + vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::CompareOp::eAlways ); + vk::PipelineDepthStencilStateCreateInfo pipelineDepthStencilStateCreateInfo( + vk::PipelineDepthStencilStateCreateFlags(), // flags + true, // depthTestEnable + true, // depthWriteEnable + vk::CompareOp::eLessOrEqual, // depthCompareOp + false, // depthBoundTestEnable + false, // stencilTestEnable + stencilOpState, // front + stencilOpState // back ); - vk::ColorComponentFlags colorComponentFlags(vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA); - vk::PipelineColorBlendAttachmentState pipelineColorBlendAttachmentState - ( - false, // blendEnable - vk::BlendFactor::eZero, // srcColorBlendFactor - vk::BlendFactor::eZero, // dstColorBlendFactor - vk::BlendOp::eAdd, // colorBlendOp - vk::BlendFactor::eZero, // srcAlphaBlendFactor - vk::BlendFactor::eZero, // dstAlphaBlendFactor - vk::BlendOp::eAdd, // alphaBlendOp - colorComponentFlags // colorWriteMask + vk::ColorComponentFlags colorComponentFlags( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG | + vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA ); + vk::PipelineColorBlendAttachmentState pipelineColorBlendAttachmentState( + false, // blendEnable + vk::BlendFactor::eZero, // srcColorBlendFactor + vk::BlendFactor::eZero, // dstColorBlendFactor + vk::BlendOp::eAdd, // colorBlendOp + vk::BlendFactor::eZero, // srcAlphaBlendFactor + vk::BlendFactor::eZero, // dstAlphaBlendFactor + vk::BlendOp::eAdd, // alphaBlendOp + colorComponentFlags // colorWriteMask ); - vk::PipelineColorBlendStateCreateInfo pipelineColorBlendStateCreateInfo - ( - vk::PipelineColorBlendStateCreateFlags(), // flags - false, // logicOpEnable - vk::LogicOp::eNoOp, // logicOp - 1, // attachmentCount - &pipelineColorBlendAttachmentState, // pAttachments - { { 1.0f, 1.0f, 1.0f, 1.0f } } // blendConstants + vk::PipelineColorBlendStateCreateInfo pipelineColorBlendStateCreateInfo( + vk::PipelineColorBlendStateCreateFlags(), // flags + false, // logicOpEnable + vk::LogicOp::eNoOp, // logicOp + 1, // attachmentCount + &pipelineColorBlendAttachmentState, // pAttachments + { { 1.0f, 1.0f, 1.0f, 1.0f } } // blendConstants ); - vk::DynamicState dynamicStates[2] = { vk::DynamicState::eViewport, vk::DynamicState::eScissor }; - vk::PipelineDynamicStateCreateInfo pipelineDynamicStateCreateInfo(vk::PipelineDynamicStateCreateFlags(), 2, dynamicStates); + vk::DynamicState dynamicStates[2] = { vk::DynamicState::eViewport, vk::DynamicState::eScissor }; + vk::PipelineDynamicStateCreateInfo pipelineDynamicStateCreateInfo( + vk::PipelineDynamicStateCreateFlags(), 2, dynamicStates ); - 
vk::GraphicsPipelineCreateInfo graphicsPipelineCreateInfo - ( - vk::PipelineCreateFlags(), // flags - 2, // stageCount - pipelineShaderStageCreateInfos, // pStages - &pipelineVertexInputStateCreateInfo, // pVertexInputState - &pipelineInputAssemblyStateCreateInfo, // pInputAssemblyState - nullptr, // pTessellationState - &pipelineViewportStateCreateInfo, // pViewportState - &pipelineRasterizationStateCreateInfo, // pRasterizationState - &pipelineMultisampleStateCreateInfo, // pMultisampleState - &pipelineDepthStencilStateCreateInfo, // pDepthStencilState - &pipelineColorBlendStateCreateInfo, // pColorBlendState - &pipelineDynamicStateCreateInfo, // pDynamicState - pipelineLayout.get(), // layout - renderPass.get() // renderPass + vk::GraphicsPipelineCreateInfo graphicsPipelineCreateInfo( + vk::PipelineCreateFlags(), // flags + 2, // stageCount + pipelineShaderStageCreateInfos, // pStages + &pipelineVertexInputStateCreateInfo, // pVertexInputState + &pipelineInputAssemblyStateCreateInfo, // pInputAssemblyState + nullptr, // pTessellationState + &pipelineViewportStateCreateInfo, // pViewportState + &pipelineRasterizationStateCreateInfo, // pRasterizationState + &pipelineMultisampleStateCreateInfo, // pMultisampleState + &pipelineDepthStencilStateCreateInfo, // pDepthStencilState + &pipelineColorBlendStateCreateInfo, // pColorBlendState + &pipelineDynamicStateCreateInfo, // pDynamicState + pipelineLayout.get(), // layout + renderPass.get() // renderPass ); - vk::UniquePipeline pipeline = device->createGraphicsPipelineUnique(nullptr, graphicsPipelineCreateInfo); + vk::UniquePipeline pipeline = device->createGraphicsPipelineUnique( nullptr, graphicsPipelineCreateInfo ); /* VULKAN_KEY_END */ } - catch (vk::SystemError &err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/15_DrawCube/15_DrawCube.cpp b/samples/15_DrawCube/15_DrawCube.cpp index f564eca..89aefa1 100644 --- a/samples/15_DrawCube/15_DrawCube.cpp +++ b/samples/15_DrawCube/15_DrawCube.cpp @@ -19,124 +19,179 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "15_DrawCube"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "15_DrawCube"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + 
vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(surfaceData.extent)); + vk::su::BufferData uniformBufferData( + physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( + device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( surfaceData.extent ) ); - vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, { {vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex} }); - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout( + device, { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex } } ); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + depthBufferData.format ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PC_C); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PC_C ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent ); - vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(coloredCubeData), vk::BufferUsageFlagBits::eVertexBuffer); - vk::su::copyToDevice(device, vertexBufferData.deviceMemory, coloredCubeData, sizeof(coloredCubeData) / sizeof(coloredCubeData[0])); + vk::su::BufferData vertexBufferData( + physicalDevice, device, sizeof( coloredCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ); + vk::su::copyToDevice( device, + vertexBufferData.deviceMemory, + coloredCubeData, + sizeof( coloredCubeData ) / sizeof( coloredCubeData[0] ) ); - vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool(device, { 
{vk::DescriptorType::eUniformBuffer, 1} });
-    vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front());
+    vk::UniqueDescriptorPool descriptorPool =
+      vk::su::createDescriptorPool( device, { { vk::DescriptorType::eUniformBuffer, 1 } } );
+    vk::UniqueDescriptorSet descriptorSet = std::move(
+      device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) )
+        .front() );
 
-    vk::su::updateDescriptorSets(device, descriptorSet, {{vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView()}}, {});
+    vk::su::updateDescriptorSets(
+      device,
+      descriptorSet,
+      { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView() } },
+      {} );
 
-    vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo());
-    vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr),
-                                                                         sizeof(coloredCubeData[0]), { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32B32A32Sfloat, 16 } },
-                                                                         vk::FrontFace::eClockwise, true, pipelineLayout, renderPass);
+    vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() );
+    vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(
+      device,
+      pipelineCache,
+      std::make_pair( *vertexShaderModule, nullptr ),
+      std::make_pair( *fragmentShaderModule, nullptr ),
+      sizeof( coloredCubeData[0] ),
+      { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32B32A32Sfloat, 16 } },
+      vk::FrontFace::eClockwise,
+      true,
+      pipelineLayout,
+      renderPass );
 
     /* VULKAN_KEY_START */
 
     // Get the index of the next available swapchain image:
-    vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo());
-    vk::ResultValue<uint32_t> currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr);
-    assert(currentBuffer.result == vk::Result::eSuccess);
-    assert(currentBuffer.value < framebuffers.size());
+    vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() );
+    vk::ResultValue<uint32_t> currentBuffer = device->acquireNextImageKHR(
+      swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr );
+    assert( currentBuffer.result == vk::Result::eSuccess );
+    assert( currentBuffer.value < framebuffers.size() );
 
-    commandBuffer->begin(vk::CommandBufferBeginInfo(vk::CommandBufferUsageFlags()));
+    commandBuffer->begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlags() ) );
 
     vk::ClearValue clearValues[2];
-    clearValues[0].color = vk::ClearColorValue(std::array<float, 4>({ 0.2f, 0.2f, 0.2f, 0.2f }));
-    clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0);
-    vk::RenderPassBeginInfo renderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 2, clearValues);
-    commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eInline);
-    commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get());
-    commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr);
+    clearValues[0].color        = vk::ClearColorValue( std::array<float, 4>( { 0.2f, 0.2f, 0.2f, 0.2f } ) );
+    clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );
+    vk::RenderPassBeginInfo renderPassBeginInfo( renderPass.get(),
+                                                 framebuffers[currentBuffer.value].get(),
+                                                 vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ),
+                                                 2,
+                                                 clearValues );
+    commandBuffer->beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );
+    commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() );
+    commandBuffer->bindDescriptorSets(
+      vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr );
 
-    commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0});
-    commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast<float>(surfaceData.extent.width), static_cast<float>(surfaceData.extent.height), 0.0f, 1.0f));
-    commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent));
+    commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } );
+    commandBuffer->setViewport( 0,
+                                vk::Viewport( 0.0f,
+                                              0.0f,
+                                              static_cast<float>( surfaceData.extent.width ),
+                                              static_cast<float>( surfaceData.extent.height ),
+                                              0.0f,
+                                              1.0f ) );
+    commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) );
 
-    commandBuffer->draw(12 * 3, 1, 0, 0);
+    commandBuffer->draw( 12 * 3, 1, 0, 0 );
     commandBuffer->endRenderPass();
     commandBuffer->end();
 
-    vk::UniqueFence drawFence = device->createFenceUnique(vk::FenceCreateInfo());
+    vk::UniqueFence drawFence = device->createFenceUnique( vk::FenceCreateInfo() );
 
-    vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput);
-    vk::SubmitInfo submitInfo(1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get());
-    graphicsQueue.submit(submitInfo, drawFence.get());
+    vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput );
+    vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() );
+    graphicsQueue.submit( submitInfo, drawFence.get() );
 
-    while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout))
+    while ( vk::Result::eTimeout == device->waitForFences( drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) )
       ;
 
-    presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), &currentBuffer.value));
-    std::this_thread::sleep_for(std::chrono::milliseconds(1000));
+    presentQueue.presentKHR(
+      vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), &currentBuffer.value ) );
+    std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) );
 
     /* VULKAN_KEY_END */
 
     device->waitIdle();
   }
-  catch (vk::SystemError& err)
+  catch ( vk::SystemError & err )
   {
     std::cout << "vk::SystemError: " << err.what() << std::endl;
-    exit(-1);
+    exit( -1 );
   }
-  catch (std::runtime_error& err)
+  catch ( std::runtime_error & err )
   {
     std::cout << "std::runtime_error: " << err.what() << std::endl;
-    exit(-1);
+    exit( -1 );
   }
-  catch (...)
+  catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/16_Vulkan_1_1/16_Vulkan_1_1.cpp b/samples/16_Vulkan_1_1/16_Vulkan_1_1.cpp index 9e1c484..5376ee8 100644 --- a/samples/16_Vulkan_1_1/16_Vulkan_1_1.cpp +++ b/samples/16_Vulkan_1_1/16_Vulkan_1_1.cpp @@ -18,98 +18,108 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" -static char const* AppName = "16_Vulkan_1_1"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "16_Vulkan_1_1"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { /* VULKAN_KEY_START */ // Keep track of the major/minor version we can actually use - uint16_t usingMajorVersion = 1; - uint16_t usingMinorVersion = 0; + uint16_t usingMajorVersion = 1; + uint16_t usingMinorVersion = 0; std::string usingVersionString = ""; // Set the desired version we want - uint16_t desiredMajorVersion = 1; - uint16_t desiredMinorVersion = 1; - uint32_t desiredVersion = VK_MAKE_VERSION(desiredMajorVersion, desiredMinorVersion, 0); + uint16_t desiredMajorVersion = 1; + uint16_t desiredMinorVersion = 1; + uint32_t desiredVersion = VK_MAKE_VERSION( desiredMajorVersion, desiredMinorVersion, 0 ); std::string desiredVersionString = ""; - desiredVersionString += std::to_string(desiredMajorVersion); + desiredVersionString += std::to_string( desiredMajorVersion ); desiredVersionString += "."; - desiredVersionString += std::to_string(desiredMinorVersion); + desiredVersionString += std::to_string( desiredMinorVersion ); -#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1) +#if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) // initialize the DipatchLoaderDynamic to use - static vk::DynamicLoader dl; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = dl.getProcAddress("vkGetInstanceProcAddr"); - VULKAN_HPP_DEFAULT_DISPATCHER.init(vkGetInstanceProcAddr); + static vk::DynamicLoader dl; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = + dl.getProcAddress( "vkGetInstanceProcAddr" ); + VULKAN_HPP_DEFAULT_DISPATCHER.init( vkGetInstanceProcAddr ); #endif // Determine what API version is available uint32_t apiVersion = vk::enumerateInstanceVersion(); // Translate the version into major/minor for easier comparison - uint32_t loader_major_version = VK_VERSION_MAJOR(apiVersion); - uint32_t loader_minor_version = VK_VERSION_MINOR(apiVersion); - std::cout << "Loader/Runtime support detected for Vulkan " << loader_major_version << "." << loader_minor_version << "\n"; + uint32_t loader_major_version = VK_VERSION_MAJOR( apiVersion ); + uint32_t loader_minor_version = VK_VERSION_MINOR( apiVersion ); + std::cout << "Loader/Runtime support detected for Vulkan " << loader_major_version << "." 
<< loader_minor_version + << "\n"; // Check current version against what we want to run - if (loader_major_version > desiredMajorVersion || - (loader_major_version == desiredMajorVersion && loader_minor_version >= desiredMinorVersion)) + if ( loader_major_version > desiredMajorVersion || + ( loader_major_version == desiredMajorVersion && loader_minor_version >= desiredMinorVersion ) ) { // Create the instance - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions(), desiredVersion); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = + vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions(), desiredVersion ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif // Get the list of physical devices std::vector physicalDevices = instance->enumeratePhysicalDevices(); - // Go through the list of physical devices and select only those that are capable of running the API version we want. + // Go through the list of physical devices and select only those that are capable of running the API version we + // want. std::vector desiredPhysicalDevices; - std::copy_if(physicalDevices.begin(), physicalDevices.end(), std::back_inserter(desiredPhysicalDevices), [desiredVersion](vk::PhysicalDevice const& pd) { return pd.getProperties().apiVersion >= desiredVersion; }); + std::copy_if( + physicalDevices.begin(), + physicalDevices.end(), + std::back_inserter( desiredPhysicalDevices ), + [desiredVersion]( vk::PhysicalDevice const & pd ) { return pd.getProperties().apiVersion >= desiredVersion; } ); // If we have something in the desired version physical device list, we're good - if (desiredPhysicalDevices.size() > 0) + if ( desiredPhysicalDevices.size() > 0 ) { usingMajorVersion = desiredMajorVersion; usingMinorVersion = desiredMinorVersion; } } - usingVersionString += std::to_string(usingMajorVersion); + usingVersionString += std::to_string( usingMajorVersion ); usingVersionString += "."; - usingVersionString += std::to_string(usingMinorVersion); + usingVersionString += std::to_string( usingMinorVersion ); - if (usingMinorVersion < desiredMinorVersion) + if ( usingMinorVersion < desiredMinorVersion ) { - std::cout << "Determined that this system can only use Vulkan API version " << usingVersionString << " instead of desired version " << desiredVersionString << std::endl; + std::cout << "Determined that this system can only use Vulkan API version " << usingVersionString + << " instead of desired version " << desiredVersionString << std::endl; } else { - std::cout << "Determined that this system can run desired Vulkan API version " << desiredVersionString << std::endl; + std::cout << "Determined that this system can run desired Vulkan API version " << desiredVersionString + << std::endl; } /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/CopyBlitImage/CopyBlitImage.cpp b/samples/CopyBlitImage/CopyBlitImage.cpp index e6d328f..e1441e9 100644 --- a/samples/CopyBlitImage/CopyBlitImage.cpp +++ b/samples/CopyBlitImage/CopyBlitImage.cpp @@ -20,156 +20,236 @@ #include -static char const* AppName = "CopyBlitImage"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "CopyBlitImage"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(640, 640)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 640, 640 ) ); - vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevice.getSurfaceCapabilitiesKHR(surfaceData.surface.get()); - if (!(surfaceCapabilities.supportedUsageFlags & vk::ImageUsageFlagBits::eTransferDst)) + vk::SurfaceCapabilitiesKHR surfaceCapabilities = + physicalDevice.getSurfaceCapabilitiesKHR( surfaceData.surface.get() ); + if ( !( surfaceCapabilities.supportedUsageFlags & vk::ImageUsageFlagBits::eTransferDst ) ) { std::cout << "Surface cannot be destination of blit - abort \n"; - exit(-1); + exit( -1 ); } - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | 
vk::ImageUsageFlagBits::eTransferDst, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferDst, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); /* VULKAN_KEY_START */ - vk::FormatProperties formatProperties = physicalDevice.getFormatProperties(swapChainData.colorFormat); - assert((formatProperties.linearTilingFeatures & vk::FormatFeatureFlagBits::eBlitSrc) && "Format cannot be used as transfer source"); + vk::FormatProperties formatProperties = physicalDevice.getFormatProperties( swapChainData.colorFormat ); + assert( ( formatProperties.linearTilingFeatures & vk::FormatFeatureFlagBits::eBlitSrc ) && + "Format cannot be used as transfer source" ); - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); + vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); // Get the index of the next available swapchain image: - vk::ResultValue nextImage = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr); - assert(nextImage.result == vk::Result::eSuccess); - assert(nextImage.value < swapChainData.images.size()); + vk::ResultValue nextImage = device->acquireNextImageKHR( + swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr ); + assert( nextImage.result == vk::Result::eSuccess ); + assert( nextImage.value < swapChainData.images.size() ); uint32_t currentBuffer = nextImage.value; - commandBuffer->begin(vk::CommandBufferBeginInfo()); - vk::su::setImageLayout(commandBuffer, swapChainData.images[currentBuffer], swapChainData.colorFormat, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + vk::su::setImageLayout( commandBuffer, + swapChainData.images[currentBuffer], + swapChainData.colorFormat, + vk::ImageLayout::eUndefined, + vk::ImageLayout::eTransferDstOptimal ); // Create an image, map it, and write some values to the image - vk::ImageCreateInfo imageCreateInfo(vk::ImageCreateFlags(), vk::ImageType::e2D, swapChainData.colorFormat, vk::Extent3D(surfaceData.extent, 1), 1, 1, vk::SampleCountFlagBits::e1, vk::ImageTiling::eLinear, vk::ImageUsageFlagBits::eTransferSrc); - vk::UniqueImage blitSourceImage = device->createImageUnique(imageCreateInfo); + vk::ImageCreateInfo imageCreateInfo( vk::ImageCreateFlags(), + vk::ImageType::e2D, + swapChainData.colorFormat, + vk::Extent3D( surfaceData.extent, 1 ), + 1, + 1, + vk::SampleCountFlagBits::e1, + vk::ImageTiling::eLinear, + vk::ImageUsageFlagBits::eTransferSrc ); + vk::UniqueImage blitSourceImage = device->createImageUnique( imageCreateInfo ); - vk::PhysicalDeviceMemoryProperties memoryProperties = physicalDevice.getMemoryProperties(); - vk::MemoryRequirements memoryRequirements = device->getImageMemoryRequirements(blitSourceImage.get()); - uint32_t memoryTypeIndex = vk::su::findMemoryType(memoryProperties, memoryRequirements.memoryTypeBits, vk::MemoryPropertyFlagBits::eHostVisible); + vk::PhysicalDeviceMemoryProperties memoryProperties = physicalDevice.getMemoryProperties(); + vk::MemoryRequirements memoryRequirements = device->getImageMemoryRequirements( 
blitSourceImage.get() );
+    uint32_t                           memoryTypeIndex = vk::su::findMemoryType(
+      memoryProperties, memoryRequirements.memoryTypeBits, vk::MemoryPropertyFlagBits::eHostVisible );
 
-    vk::UniqueDeviceMemory deviceMemory = device->allocateMemoryUnique(vk::MemoryAllocateInfo(memoryRequirements.size, memoryTypeIndex));
-    device->bindImageMemory(blitSourceImage.get(), deviceMemory.get(), 0);
+    vk::UniqueDeviceMemory deviceMemory =
+      device->allocateMemoryUnique( vk::MemoryAllocateInfo( memoryRequirements.size, memoryTypeIndex ) );
+    device->bindImageMemory( blitSourceImage.get(), deviceMemory.get(), 0 );
 
-    vk::su::setImageLayout(commandBuffer, blitSourceImage.get(), swapChainData.colorFormat, vk::ImageLayout::eUndefined, vk::ImageLayout::eGeneral);
+    vk::su::setImageLayout( commandBuffer,
+                            blitSourceImage.get(),
+                            swapChainData.colorFormat,
+                            vk::ImageLayout::eUndefined,
+                            vk::ImageLayout::eGeneral );
     commandBuffer->end();
 
     /* Queue the command buffer for execution */
-    vk::UniqueFence commandFence = device->createFenceUnique({});
-    vk::PipelineStageFlags pipeStageFlags(vk::PipelineStageFlagBits::eColorAttachmentOutput);
-    graphicsQueue.submit(vk::SubmitInfo(1, &imageAcquiredSemaphore.get(), &pipeStageFlags, 1, &commandBuffer.get()), commandFence.get());
+    vk::UniqueFence        commandFence = device->createFenceUnique( {} );
+    vk::PipelineStageFlags pipeStageFlags( vk::PipelineStageFlagBits::eColorAttachmentOutput );
+    graphicsQueue.submit( vk::SubmitInfo( 1, &imageAcquiredSemaphore.get(), &pipeStageFlags, 1, &commandBuffer.get() ),
+                          commandFence.get() );
 
     /* Make sure command buffer is finished before mapping */
-    while (device->waitForFences(commandFence.get(), true, vk::su::FenceTimeout) == vk::Result::eTimeout)
+    while ( device->waitForFences( commandFence.get(), true, vk::su::FenceTimeout ) == vk::Result::eTimeout )
       ;
 
-    unsigned char* pImageMemory = static_cast<unsigned char*>(device->mapMemory(deviceMemory.get(), 0, memoryRequirements.size));
+    unsigned char * pImageMemory =
+      static_cast<unsigned char *>( device->mapMemory( deviceMemory.get(), 0, memoryRequirements.size ) );
 
     // Checkerboard of 8x8 pixel squares
-    for (uint32_t row = 0; row < surfaceData.extent.height; row++)
+    for ( uint32_t row = 0; row < surfaceData.extent.height; row++ )
     {
-      for (uint32_t col = 0; col < surfaceData.extent.width; col++)
+      for ( uint32_t col = 0; col < surfaceData.extent.width; col++ )
       {
-        unsigned char rgb = (((row & 0x8) == 0) ^ ((col & 0x8) == 0)) * 255;
-        pImageMemory[0] = rgb;
-        pImageMemory[1] = rgb;
-        pImageMemory[2] = rgb;
-        pImageMemory[3] = 255;
+        unsigned char rgb = ( ( ( row & 0x8 ) == 0 ) ^ ( ( col & 0x8 ) == 0 ) ) * 255;
+        pImageMemory[0]   = rgb;
+        pImageMemory[1]   = rgb;
+        pImageMemory[2]   = rgb;
+        pImageMemory[3]   = 255;
         pImageMemory += 4;
       }
     }
 
     // Flush the mapped memory and then unmap it. 
Assume it isn't coherent since we didn't really confirm - device->flushMappedMemoryRanges(vk::MappedMemoryRange(deviceMemory.get(), 0, memoryRequirements.size)); - device->unmapMemory(deviceMemory.get()); + device->flushMappedMemoryRanges( vk::MappedMemoryRange( deviceMemory.get(), 0, memoryRequirements.size ) ); + device->unmapMemory( deviceMemory.get() ); - commandBuffer->reset({}); - commandBuffer->begin(vk::CommandBufferBeginInfo()); + commandBuffer->reset( {} ); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); // Intend to blit from this image, set the layout accordingly - vk::su::setImageLayout(commandBuffer, blitSourceImage.get(), swapChainData.colorFormat, vk::ImageLayout::eGeneral, vk::ImageLayout::eTransferSrcOptimal); + vk::su::setImageLayout( commandBuffer, + blitSourceImage.get(), + swapChainData.colorFormat, + vk::ImageLayout::eGeneral, + vk::ImageLayout::eTransferSrcOptimal ); vk::Image blitDestinationImage = swapChainData.images[currentBuffer]; // Do a 32x32 blit to all of the dst image - should get big squares - vk::ImageSubresourceLayers imageSubresourceLayers(vk::ImageAspectFlagBits::eColor, 0, 0, 1); - vk::ImageBlit imageBlit(imageSubresourceLayers, { { vk::Offset3D(0, 0, 0), vk::Offset3D(32, 32, 1) } }, imageSubresourceLayers, { { vk::Offset3D(0, 0, 0), vk::Offset3D(surfaceData.extent.width, surfaceData.extent.height, 1) } }); - commandBuffer->blitImage(blitSourceImage.get(), vk::ImageLayout::eTransferSrcOptimal, blitDestinationImage, vk::ImageLayout::eTransferDstOptimal, imageBlit, vk::Filter::eLinear); + vk::ImageSubresourceLayers imageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ); + vk::ImageBlit imageBlit( + imageSubresourceLayers, + { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( 32, 32, 1 ) } }, + imageSubresourceLayers, + { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( surfaceData.extent.width, surfaceData.extent.height, 1 ) } } ); + commandBuffer->blitImage( blitSourceImage.get(), + vk::ImageLayout::eTransferSrcOptimal, + blitDestinationImage, + vk::ImageLayout::eTransferDstOptimal, + imageBlit, + vk::Filter::eLinear ); // Use a barrier to make sure the blit is finished before the copy starts - vk::ImageMemoryBarrier memoryBarrier(vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eMemoryRead, vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eTransferDstOptimal, - VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, blitDestinationImage, vk::ImageSubresourceRange(vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1)); - commandBuffer->pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eTransfer, vk::DependencyFlags(), nullptr, nullptr, memoryBarrier); + vk::ImageMemoryBarrier memoryBarrier( vk::AccessFlagBits::eTransferWrite, + vk::AccessFlagBits::eMemoryRead, + vk::ImageLayout::eTransferDstOptimal, + vk::ImageLayout::eTransferDstOptimal, + VK_QUEUE_FAMILY_IGNORED, + VK_QUEUE_FAMILY_IGNORED, + blitDestinationImage, + vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) ); + commandBuffer->pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, + vk::PipelineStageFlagBits::eTransfer, + vk::DependencyFlags(), + nullptr, + nullptr, + memoryBarrier ); // Do a image copy to part of the dst image - checks should stay small - vk::ImageCopy imageCopy(imageSubresourceLayers, vk::Offset3D(), imageSubresourceLayers, vk::Offset3D(256, 256, 0), vk::Extent3D(128, 128, 1)); - commandBuffer->copyImage(blitSourceImage.get(), vk::ImageLayout::eTransferSrcOptimal, blitDestinationImage, 
vk::ImageLayout::eTransferDstOptimal, imageCopy); + vk::ImageCopy imageCopy( imageSubresourceLayers, + vk::Offset3D(), + imageSubresourceLayers, + vk::Offset3D( 256, 256, 0 ), + vk::Extent3D( 128, 128, 1 ) ); + commandBuffer->copyImage( blitSourceImage.get(), + vk::ImageLayout::eTransferSrcOptimal, + blitDestinationImage, + vk::ImageLayout::eTransferDstOptimal, + imageCopy ); - vk::ImageMemoryBarrier prePresentBarrier(vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eMemoryRead, vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::ePresentSrcKHR, - VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, swapChainData.images[currentBuffer], vk::ImageSubresourceRange(vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1)); - commandBuffer->pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eTopOfPipe, vk::DependencyFlags(), nullptr, nullptr, prePresentBarrier); + vk::ImageMemoryBarrier prePresentBarrier( + vk::AccessFlagBits::eTransferWrite, + vk::AccessFlagBits::eMemoryRead, + vk::ImageLayout::eTransferDstOptimal, + vk::ImageLayout::ePresentSrcKHR, + VK_QUEUE_FAMILY_IGNORED, + VK_QUEUE_FAMILY_IGNORED, + swapChainData.images[currentBuffer], + vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) ); + commandBuffer->pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, + vk::PipelineStageFlagBits::eTopOfPipe, + vk::DependencyFlags(), + nullptr, + nullptr, + prePresentBarrier ); commandBuffer->end(); - vk::UniqueFence drawFence = device->createFenceUnique({}); - graphicsQueue.submit(vk::SubmitInfo(0, nullptr, nullptr, 1, &commandBuffer.get()), drawFence.get()); + vk::UniqueFence drawFence = device->createFenceUnique( {} ); + graphicsQueue.submit( vk::SubmitInfo( 0, nullptr, nullptr, 1, &commandBuffer.get() ), drawFence.get() ); graphicsQueue.waitIdle(); /* Make sure command buffer is finished before presenting */ - while (device->waitForFences(drawFence.get(), true, vk::su::FenceTimeout) == vk::Result::eTimeout) + while ( device->waitForFences( drawFence.get(), true, vk::su::FenceTimeout ) == vk::Result::eTimeout ) ; /* Now present the image in the window */ - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer, nullptr)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer, nullptr ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
)
   {
     std::cout << "unknown error\n";
-    exit(-1);
+    exit( -1 );
   }
 
   return 0;
 }
diff --git a/samples/CreateDebugUtilsMessenger/CreateDebugUtilsMessenger.cpp b/samples/CreateDebugUtilsMessenger/CreateDebugUtilsMessenger.cpp
index e623c4c..a9507fb 100644
--- a/samples/CreateDebugUtilsMessenger/CreateDebugUtilsMessenger.cpp
+++ b/samples/CreateDebugUtilsMessenger/CreateDebugUtilsMessenger.cpp
@@ -16,68 +16,89 @@
 // Draw a cube
 
 #include "vulkan/vulkan.hpp"
+
 #include <iostream>
 #include <sstream>
 
-static char const* AppName = "CreateDebugReportMessenger";
-static char const* EngineName = "Vulkan.hpp";
+static char const * AppName    = "CreateDebugReportMessenger";
+static char const * EngineName = "Vulkan.hpp";
 
-PFN_vkCreateDebugUtilsMessengerEXT pfnVkCreateDebugUtilsMessengerEXT;
+PFN_vkCreateDebugUtilsMessengerEXT  pfnVkCreateDebugUtilsMessengerEXT;
 PFN_vkDestroyDebugUtilsMessengerEXT pfnVkDestroyDebugUtilsMessengerEXT;
 
-VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger)
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT( VkInstance instance,
+                                                               const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo,
+                                                               const VkAllocationCallbacks * pAllocator,
+                                                               VkDebugUtilsMessengerEXT * pMessenger )
 {
-  return pfnVkCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
+  return pfnVkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger );
 }
 
-VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, VkAllocationCallbacks const * pAllocator)
+VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( VkInstance instance,
+                                                            VkDebugUtilsMessengerEXT messenger,
+                                                            VkAllocationCallbacks const * pAllocator )
 {
-  return pfnVkDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
+  return pfnVkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator );
 }
-
-VKAPI_ATTR VkBool32 VKAPI_CALL debugMessageFunc(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes,
-                                                VkDebugUtilsMessengerCallbackDataEXT const * pCallbackData, void * /*pUserData*/)
+VKAPI_ATTR VkBool32 VKAPI_CALL debugMessageFunc( VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+                                                 VkDebugUtilsMessageTypeFlagsEXT messageTypes,
+                                                 VkDebugUtilsMessengerCallbackDataEXT const * pCallbackData,
+                                                 void * /*pUserData*/ )
 {
   std::ostringstream message;
 
-  message << vk::to_string(static_cast<vk::DebugUtilsMessageSeverityFlagBitsEXT>(messageSeverity)) << ": " << vk::to_string(static_cast<vk::DebugUtilsMessageTypeFlagsEXT>(messageTypes)) << ":\n";
-  message << "\t" << "messageIDName = <" << pCallbackData->pMessageIdName << ">\n";
-  message << "\t" << "messageIdNumber = " << pCallbackData->messageIdNumber << "\n";
-  message << "\t" << "message = <" << pCallbackData->pMessage << ">\n";
-  if (0 < pCallbackData->queueLabelCount)
+  message << vk::to_string( static_cast<vk::DebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ) ) << ": "
+          << vk::to_string( static_cast<vk::DebugUtilsMessageTypeFlagsEXT>( messageTypes ) ) << ":\n";
+  message << "\t"
+          << "messageIDName = <" << pCallbackData->pMessageIdName << ">\n";
+  message << "\t"
+          << "messageIdNumber = " << pCallbackData->messageIdNumber << "\n";
+  message << "\t"
+          << "message = <" << pCallbackData->pMessage << ">\n";
+  if ( 0 < pCallbackData->queueLabelCount )
   {
-    message << "\t" << "Queue Labels:\n";
-    for (uint8_t i = 0; i < pCallbackData->queueLabelCount; i++)
+    message << "\t"
+            << "Queue Labels:\n";
+    for ( uint8_t i = 0; i < pCallbackData->queueLabelCount; i++ )
     {
-      message << "\t\t" << "labelName = <" << pCallbackData->pQueueLabels[i].pLabelName << ">\n";
+      message << "\t\t"
+              << "labelName = <" << pCallbackData->pQueueLabels[i].pLabelName << ">\n";
     }
   }
-  if (0 < pCallbackData->cmdBufLabelCount)
+  if ( 0 < pCallbackData->cmdBufLabelCount )
   {
-    message << "\t" << "CommandBuffer Labels:\n";
-    for (uint8_t i = 0; i < pCallbackData->cmdBufLabelCount; i++)
+    message << "\t"
+            << "CommandBuffer Labels:\n";
+    for ( uint8_t i = 0; i < pCallbackData->cmdBufLabelCount; i++ )
     {
-      message << "\t\t" << "labelName = <" << pCallbackData->pCmdBufLabels[i].pLabelName << ">\n";
+      message << "\t\t"
+              << "labelName = <" << pCallbackData->pCmdBufLabels[i].pLabelName << ">\n";
     }
   }
-  if (0 < pCallbackData->objectCount)
+  if ( 0 < pCallbackData->objectCount )
   {
-    message << "\t" << "Objects:\n";
-    for (uint8_t i = 0; i < pCallbackData->objectCount; i++)
+    message << "\t"
+            << "Objects:\n";
+    for ( uint8_t i = 0; i < pCallbackData->objectCount; i++ )
     {
-      message << "\t\t" << "Object " << i << "\n";
-      message << "\t\t\t" << "objectType = " << vk::to_string(static_cast<vk::ObjectType>(pCallbackData->pObjects[i].objectType)) << "\n";
-      message << "\t\t\t" << "objectHandle = " << pCallbackData->pObjects[i].objectHandle << "\n";
-      if (pCallbackData->pObjects[i].pObjectName)
+      message << "\t\t"
+              << "Object " << i << "\n";
+      message << "\t\t\t"
+              << "objectType = "
+              << vk::to_string( static_cast<vk::ObjectType>( pCallbackData->pObjects[i].objectType ) ) << "\n";
+      message << "\t\t\t"
+              << "objectHandle = " << pCallbackData->pObjects[i].objectHandle << "\n";
+      if ( pCallbackData->pObjects[i].pObjectName )
       {
-        message << "\t\t\t" << "objectName = <" << pCallbackData->pObjects[i].pObjectName << ">\n";
+        message << "\t\t\t"
+                << "objectName = <" << pCallbackData->pObjects[i].pObjectName << ">\n";
       }
     }
   }
 #ifdef _WIN32
-  MessageBox(NULL, message.str().c_str(), "Alert", MB_OK);
+  MessageBox( NULL, message.str().c_str(), "Alert", MB_OK );
 #else
   std::cout << message.str() << std::endl;
 #endif
@@ -85,8 +106,7 @@ VKAPI_ATTR VkBool32 VKAPI_CALL debugMessageFunc(VkDebugUtilsMessageSeverityFlagB
   return false;
 }
-
-int main(int /*argc*/, char ** /*argv*/)
+int main( int /*argc*/, char ** /*argv*/ )
 {
   try
   {
@@ -94,51 +114,61 @@ int main(int /*argc*/, char ** /*argv*/)
     std::vector<vk::ExtensionProperties> props = vk::enumerateInstanceExtensionProperties();
 
-    auto propsIterator = std::find_if(props.begin(), props.end(), [](vk::ExtensionProperties const& ep) { return strcmp(ep.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0; });
-    if (propsIterator == props.end())
+    auto propsIterator = std::find_if( props.begin(), props.end(), []( vk::ExtensionProperties const & ep ) {
+      return strcmp( ep.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME ) == 0;
+    } );
+    if ( propsIterator == props.end() )
     {
-      std::cout << "Something went very wrong, cannot find " << VK_EXT_DEBUG_UTILS_EXTENSION_NAME << " extension" << std::endl;
-      exit(1);
+      std::cout << "Something went very wrong, cannot find " << VK_EXT_DEBUG_UTILS_EXTENSION_NAME << " extension"
+                << std::endl;
+      exit( 1 );
     }
 
-    vk::ApplicationInfo applicationInfo(AppName, 1, EngineName, 1, VK_API_VERSION_1_1);
-    const char *extensionName = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
-    vk::UniqueInstance instance = vk::createInstanceUnique(vk::InstanceCreateInfo(vk::InstanceCreateFlags(), &applicationInfo, 0, nullptr, 1, &extensionName));
+    vk::ApplicationInfo applicationInfo( AppName, 1, EngineName, 1, VK_API_VERSION_1_1 );
+    const char * extensionName = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
+    vk::UniqueInstance instance = vk::createInstanceUnique(
+      vk::InstanceCreateInfo( vk::InstanceCreateFlags(), &applicationInfo, 0, nullptr, 1, &extensionName ) );
 
-    pfnVkCreateDebugUtilsMessengerEXT = reinterpret_cast<PFN_vkCreateDebugUtilsMessengerEXT>(instance->getProcAddr("vkCreateDebugUtilsMessengerEXT"));
-    if (!pfnVkCreateDebugUtilsMessengerEXT)
+    pfnVkCreateDebugUtilsMessengerEXT =
+      reinterpret_cast<PFN_vkCreateDebugUtilsMessengerEXT>( instance->getProcAddr( "vkCreateDebugUtilsMessengerEXT" ) );
+    if ( !pfnVkCreateDebugUtilsMessengerEXT )
     {
       std::cout << "GetInstanceProcAddr: Unable to find pfnVkCreateDebugUtilsMessengerEXT function." << std::endl;
-      exit(1);
+      exit( 1 );
     }
 
-    pfnVkDestroyDebugUtilsMessengerEXT = reinterpret_cast<PFN_vkDestroyDebugUtilsMessengerEXT>(instance->getProcAddr("vkDestroyDebugUtilsMessengerEXT"));
-    if (!pfnVkDestroyDebugUtilsMessengerEXT)
+    pfnVkDestroyDebugUtilsMessengerEXT = reinterpret_cast<PFN_vkDestroyDebugUtilsMessengerEXT>(
+      instance->getProcAddr( "vkDestroyDebugUtilsMessengerEXT" ) );
+    if ( !pfnVkDestroyDebugUtilsMessengerEXT )
     {
       std::cout << "GetInstanceProcAddr: Unable to find pfnVkDestroyDebugUtilsMessengerEXT function." << std::endl;
-      exit(1);
+      exit( 1 );
     }
 
-    vk::DebugUtilsMessageSeverityFlagsEXT severityFlags(vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError);
-    vk::DebugUtilsMessageTypeFlagsEXT messageTypeFlags(vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation);
-    vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = instance->createDebugUtilsMessengerEXTUnique(vk::DebugUtilsMessengerCreateInfoEXT({}, severityFlags, messageTypeFlags, &debugMessageFunc));
+    vk::DebugUtilsMessageSeverityFlagsEXT severityFlags( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
+                                                         vk::DebugUtilsMessageSeverityFlagBitsEXT::eError );
+    vk::DebugUtilsMessageTypeFlagsEXT messageTypeFlags( vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
+                                                        vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance |
+                                                        vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation );
+    vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = instance->createDebugUtilsMessengerEXTUnique(
+      vk::DebugUtilsMessengerCreateInfoEXT( {}, severityFlags, messageTypeFlags, &debugMessageFunc ) );
 
     /* VULKAN_KEY_END */
   }
-  catch (vk::SystemError& err)
+  catch ( vk::SystemError & err )
  {
    std::cout << "vk::SystemError: " << err.what() << std::endl;
-    exit(-1);
+    exit( -1 );
  }
-  catch (std::runtime_error& err)
+  catch ( std::runtime_error & err )
  {
    std::cout << "std::runtime_error: " << err.what() << std::endl;
-    exit(-1);
+    exit( -1 );
  }
-  catch (...)
+  catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/DrawTexturedCube/DrawTexturedCube.cpp b/samples/DrawTexturedCube/DrawTexturedCube.cpp index 0cd14c2..02812e4 100644 --- a/samples/DrawTexturedCube/DrawTexturedCube.cpp +++ b/samples/DrawTexturedCube/DrawTexturedCube.cpp @@ -19,130 +19,186 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "DrawTexturedCube"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "DrawTexturedCube"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + 
vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::su::TextureData textureData(physicalDevice, device); + vk::su::TextureData textureData( physicalDevice, device ); - commandBuffer->begin(vk::CommandBufferBeginInfo()); - textureData.setImage(device, commandBuffer, vk::su::CheckerboardImageGenerator()); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + textureData.setImage( device, commandBuffer, vk::su::CheckerboardImageGenerator() ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(surfaceData.extent)); + vk::su::BufferData uniformBufferData( + physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( + device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( surfaceData.extent ) ); - vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, - { {vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex}, {vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment} }); - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout( + device, + { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex }, + { vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment } } ); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + depthBufferData.format ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, 
surfaceData.extent ); - vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(texturedCubeData), vk::BufferUsageFlagBits::eVertexBuffer); - vk::su::copyToDevice(device, vertexBufferData.deviceMemory, texturedCubeData, sizeof(texturedCubeData) / sizeof(texturedCubeData[0])); + vk::su::BufferData vertexBufferData( + physicalDevice, device, sizeof( texturedCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ); + vk::su::copyToDevice( device, + vertexBufferData.deviceMemory, + texturedCubeData, + sizeof( texturedCubeData ) / sizeof( texturedCubeData[0] ) ); - vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool(device, { {vk::DescriptorType::eUniformBuffer, 1}, {vk::DescriptorType::eCombinedImageSampler, 1} }); - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front()); + vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool( + device, { { vk::DescriptorType::eUniformBuffer, 1 }, { vk::DescriptorType::eCombinedImageSampler, 1 } } ); + vk::UniqueDescriptorSet descriptorSet = std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); - vk::su::updateDescriptorSets(device, descriptorSet, {{vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView()}}, textureData); + vk::su::updateDescriptorSets( + device, + descriptorSet, + { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView() } }, + textureData ); - vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr), - sizeof(texturedCubeData[0]), {{ vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 }}, - vk::FrontFace::eClockwise, true, pipelineLayout, renderPass); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() ); + vk::UniquePipeline graphicsPipeline = + vk::su::createGraphicsPipeline( device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + sizeof( texturedCubeData[0] ), + { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, + vk::FrontFace::eClockwise, + true, + pipelineLayout, + renderPass ); /* VULKAN_KEY_START */ // Get the index of the next available swapchain image: - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); + vk::ResultValue currentBuffer = device->acquireNextImageKHR( + swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); // commandBuffer->begin() has already been called above! 
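
The acquire call a few lines above simply asserts on the result. As a minimal sketch, not part of this patch, here is how the vk::ResultValue<uint32_t> returned by acquireNextImageKHR() could be checked instead; the helper name acquireImageIndex is made up for illustration, and with the default exception configuration only success codes such as eSuboptimalKHR reach this check (error codes throw vk::SystemError).

uint32_t acquireImageIndex( vk::UniqueDevice const & device, vk::SwapchainKHR swapchain, vk::Semaphore semaphore )
{
  // eSuboptimalKHR still delivers a usable image index, so treat it as success here
  vk::ResultValue<uint32_t> rv = device->acquireNextImageKHR( swapchain, vk::su::FenceTimeout, semaphore, nullptr );
  if ( ( rv.result != vk::Result::eSuccess ) && ( rv.result != vk::Result::eSuboptimalKHR ) )
  {
    throw std::runtime_error( "failed to acquire a swapchain image: " + vk::to_string( rv.result ) );
  }
  return rv.value;
}
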
vk::ClearValue clearValues[2]; - clearValues[0].color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); - vk::RenderPassBeginInfo renderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 2, clearValues); - commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eInline); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get()); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); + vk::RenderPassBeginInfo renderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), + 2, + clearValues ); + commandBuffer->beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline ); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr ); - commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0}); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast(surfaceData.extent.width), static_cast(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent)); + commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } ); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast( surfaceData.extent.width ), + static_cast( surfaceData.extent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); - commandBuffer->draw(12 * 3, 1, 0, 0); + commandBuffer->draw( 12 * 3, 1, 0, 0 ); commandBuffer->endRenderPass(); commandBuffer->end(); - vk::UniqueFence drawFence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence drawFence = device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - vk::SubmitInfo submitInfo(1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, drawFence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, drawFence.get() ); - while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == device->waitForFences( drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); /* VULKAN_KEY_END */ device->waitIdle(); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); 
+ exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/DynamicUniform/DynamicUniform.cpp b/samples/DynamicUniform/DynamicUniform.cpp index 479febc..c77e427 100644 --- a/samples/DynamicUniform/DynamicUniform.cpp +++ b/samples/DynamicUniform/DynamicUniform.cpp @@ -19,159 +19,231 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "DynamicUniform"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "DynamicUniform"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, 
graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + depthBufferData.format ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PC_C); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PC_C ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent ); - vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(coloredCubeData), vk::BufferUsageFlagBits::eVertexBuffer); - vk::su::copyToDevice(device, vertexBufferData.deviceMemory, coloredCubeData, sizeof(coloredCubeData) / sizeof(coloredCubeData[0])); + vk::su::BufferData vertexBufferData( + physicalDevice, device, sizeof( coloredCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ); + vk::su::copyToDevice( device, + vertexBufferData.deviceMemory, + coloredCubeData, + sizeof( coloredCubeData ) / sizeof( coloredCubeData[0] ) ); /* VULKAN_KEY_START */ vk::PhysicalDeviceLimits limits = physicalDevice.getProperties().limits; - if (limits.maxDescriptorSetUniformBuffersDynamic < 1) + if ( limits.maxDescriptorSetUniformBuffersDynamic < 1 ) { std::cout << "No dynamic uniform buffers supported\n"; - exit(-1); + exit( -1 ); } /* Set up uniform buffer with 2 transform matrices in it */ glm::mat4x4 mvpcs[2]; - glm::mat4x4 model = glm::mat4x4(1.0f); - glm::mat4x4 view = glm::lookAt(glm::vec3(0.0f, 3.0f, -10.0f), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, -1.0f, 0.0f)); - glm::mat4x4 projection = glm::perspective(glm::radians(45.0f), 1.0f, 0.1f, 100.0f); - glm::mat4x4 clip = glm::mat4x4(1.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.5f, 0.0f, 0.0f, 0.0f, 0.5f, 1.0f); // vulkan clip space has inverted y and half z ! 
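
A minimal sketch, not part of this patch, of the matrix setup this sample performs with the same values; the helper name makeMvpc is invented. GLM matrices are column-major, so the sixteen scalars fill the clip matrix column by column, flipping y and remapping z from [-1, 1] to [0, 1] as the comment below notes.

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

glm::mat4x4 makeMvpc( glm::mat4x4 const & model )
{
  glm::mat4x4 view =
    glm::lookAt( glm::vec3( 0.0f, 3.0f, -10.0f ), glm::vec3( 0.0f, 0.0f, 0.0f ), glm::vec3( 0.0f, -1.0f, 0.0f ) );
  glm::mat4x4 projection = glm::perspective( glm::radians( 45.0f ), 1.0f, 0.1f, 100.0f );
  glm::mat4x4 clip( 1.0f, 0.0f, 0.0f, 0.0f,    // column 0
                    0.0f, -1.0f, 0.0f, 0.0f,   // column 1: flip y
                    0.0f, 0.0f, 0.5f, 0.0f,    // column 2: halve z
                    0.0f, 0.0f, 0.5f, 1.0f );  // column 3: shift z into [0, 1]
  return clip * projection * view * model;
}
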
- mvpcs[0] = clip * projection * view * model; + glm::mat4x4 model = glm::mat4x4( 1.0f ); + glm::mat4x4 view = + glm::lookAt( glm::vec3( 0.0f, 3.0f, -10.0f ), glm::vec3( 0.0f, 0.0f, 0.0f ), glm::vec3( 0.0f, -1.0f, 0.0f ) ); + glm::mat4x4 projection = glm::perspective( glm::radians( 45.0f ), 1.0f, 0.1f, 100.0f ); + glm::mat4x4 clip = glm::mat4x4( 1.0f, + 0.0f, + 0.0f, + 0.0f, + 0.0f, + -1.0f, + 0.0f, + 0.0f, + 0.0f, + 0.0f, + 0.5f, + 0.0f, + 0.0f, + 0.0f, + 0.5f, + 1.0f ); // vulkan clip space has inverted y and half z ! + mvpcs[0] = clip * projection * view * model; - model = glm::translate(model, glm::vec3(-1.5f, 1.5f, -1.5f)); + model = glm::translate( model, glm::vec3( -1.5f, 1.5f, -1.5f ) ); mvpcs[1] = clip * projection * view * model; - vk::DeviceSize bufferSize = sizeof(glm::mat4x4); - if (limits.minUniformBufferOffsetAlignment) + vk::DeviceSize bufferSize = sizeof( glm::mat4x4 ); + if ( limits.minUniformBufferOffsetAlignment ) { - bufferSize = (bufferSize + limits.minUniformBufferOffsetAlignment - 1) & ~(limits.minUniformBufferOffsetAlignment - 1); + bufferSize = + ( bufferSize + limits.minUniformBufferOffsetAlignment - 1 ) & ~( limits.minUniformBufferOffsetAlignment - 1 ); } - vk::su::BufferData uniformBufferData(physicalDevice, device, 2 * bufferSize, vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, mvpcs, 2, bufferSize); + vk::su::BufferData uniformBufferData( + physicalDevice, device, 2 * bufferSize, vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( device, uniformBufferData.deviceMemory, mvpcs, 2, bufferSize ); // create a DescriptorSetLayout with vk::DescriptorType::eUniformBufferDynamic - vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, { {vk::DescriptorType::eUniformBufferDynamic, 1, vk::ShaderStageFlagBits::eVertex} }); - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout( + device, { { vk::DescriptorType::eUniformBufferDynamic, 1, vk::ShaderStageFlagBits::eVertex } } ); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) ); // create a DescriptorPool with vk::DescriptorType::eUniformBufferDynamic - vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool(device, { { vk::DescriptorType::eUniformBufferDynamic, 1 } }); - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front()); + vk::UniqueDescriptorPool descriptorPool = + vk::su::createDescriptorPool( device, { { vk::DescriptorType::eUniformBufferDynamic, 1 } } ); + vk::UniqueDescriptorSet descriptorSet = std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); - vk::su::updateDescriptorSets(device, descriptorSet, {{vk::DescriptorType::eUniformBufferDynamic, uniformBufferData.buffer, vk::UniqueBufferView()}}, {}); + vk::su::updateDescriptorSets( + device, + descriptorSet, + { { vk::DescriptorType::eUniformBufferDynamic, uniformBufferData.buffer, vk::UniqueBufferView() } }, + {} ); - vk::UniquePipelineCache pipelineCache = 
device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr), - sizeof(coloredCubeData[0]), { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32B32A32Sfloat, 16 } }, - vk::FrontFace::eClockwise, true, pipelineLayout, renderPass); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() ); + vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline( + device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + sizeof( coloredCubeData[0] ), + { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32B32A32Sfloat, 16 } }, + vk::FrontFace::eClockwise, + true, + pipelineLayout, + renderPass ); // Get the index of the next available swapchain image: - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); + vk::ResultValue currentBuffer = device->acquireNextImageKHR( + swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); - commandBuffer->begin(vk::CommandBufferBeginInfo(vk::CommandBufferUsageFlags())); + commandBuffer->begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlags() ) ); vk::ClearValue clearValues[2]; - clearValues[0].color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); - vk::RenderPassBeginInfo renderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 2, clearValues); - commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eInline); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get()); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); + vk::RenderPassBeginInfo renderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), + 2, + clearValues ); + commandBuffer->beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline ); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast(surfaceData.extent.width), static_cast(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent)); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast( surfaceData.extent.width ), + static_cast( surfaceData.extent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); /* The first draw should use the first matrix in the buffer */ uint32_t dynamicOffset = 0; 
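
A small sketch, not part of this patch, of the round-up done a few lines above when sizing the dynamic uniform buffer. The bit trick requires a non-zero power-of-two alignment, which the Vulkan specification guarantees for minUniformBufferOffsetAlignment; the helper name alignUp is made up here.

vk::DeviceSize alignUp( vk::DeviceSize size, vk::DeviceSize alignment )
{
  // round size up to the next multiple of alignment (alignment must be a non-zero power of two)
  return ( size + alignment - 1 ) & ~( alignment - 1 );
}

// usage mirroring the sample: every dynamic offset must be a multiple of the aligned slice size
// vk::DeviceSize slice        = alignUp( sizeof( glm::mat4x4 ), limits.minUniformBufferOffsetAlignment );
// uint32_t       secondOffset = static_cast<uint32_t>( slice );  // offset of the second matrix
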
- commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), dynamicOffset); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), dynamicOffset ); - commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0}); - commandBuffer->draw(12 * 3, 1, 0, 0); + commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } ); + commandBuffer->draw( 12 * 3, 1, 0, 0 ); // the second draw should use the second matrix in the buffer; dynamicOffset = (uint32_t)bufferSize; - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), dynamicOffset); - commandBuffer->draw(12 * 3, 1, 0, 0); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), dynamicOffset ); + commandBuffer->draw( 12 * 3, 1, 0, 0 ); commandBuffer->endRenderPass(); commandBuffer->end(); - vk::UniqueFence drawFence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence drawFence = device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - vk::SubmitInfo submitInfo(1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, drawFence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, drawFence.get() ); - while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == device->waitForFences( drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); /* VULKAN_KEY_END */ device->waitIdle(); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/EnableValidationWithCallback/EnableValidationWithCallback.cpp b/samples/EnableValidationWithCallback/EnableValidationWithCallback.cpp index 0b8927c..97516d3 100644 --- a/samples/EnableValidationWithCallback/EnableValidationWithCallback.cpp +++ b/samples/EnableValidationWithCallback/EnableValidationWithCallback.cpp @@ -17,68 +17,78 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include #include #include -static char const* AppName = "EnableValidationWithCallback"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "EnableValidationWithCallback"; +static char const * EngineName = "Vulkan.hpp"; -PFN_vkCreateDebugUtilsMessengerEXT pfnVkCreateDebugUtilsMessengerEXT; +PFN_vkCreateDebugUtilsMessengerEXT pfnVkCreateDebugUtilsMessengerEXT; PFN_vkDestroyDebugUtilsMessengerEXT pfnVkDestroyDebugUtilsMessengerEXT; -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger) +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT( VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugUtilsMessengerEXT * pMessenger ) { - return pfnVkCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger); + return pfnVkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); } -VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, VkAllocationCallbacks const * pAllocator) +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + VkAllocationCallbacks const * pAllocator ) { - return pfnVkDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator); + return pfnVkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); } - -VKAPI_ATTR VkBool32 VKAPI_CALL debugMessageFunc(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, - VkDebugUtilsMessengerCallbackDataEXT const * pCallbackData, void * /*pUserData*/) +VKAPI_ATTR VkBool32 VKAPI_CALL debugMessageFunc( VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + VkDebugUtilsMessengerCallbackDataEXT const * pCallbackData, + void * /*pUserData*/ ) { std::string message; - message += vk::to_string(static_cast(messageSeverity)) + ": " + vk::to_string(static_cast(messageTypes)) + ":\n"; - message += std::string("\t") + "messageIDName = <" + pCallbackData->pMessageIdName + ">\n"; - message += std::string("\t") + "messageIdNumber = " + std::to_string(pCallbackData->messageIdNumber) + "\n"; - message += std::string("\t") + "message = <" + pCallbackData->pMessage + ">\n"; - if (0 < pCallbackData->queueLabelCount) + message += vk::to_string( static_cast( messageSeverity ) ) + ": " + + vk::to_string( static_cast( messageTypes ) ) + ":\n"; + message += std::string( "\t" ) + "messageIDName = <" + pCallbackData->pMessageIdName + ">\n"; + message += std::string( "\t" ) + "messageIdNumber = " + std::to_string( pCallbackData->messageIdNumber ) + "\n"; + message += std::string( "\t" ) + "message = <" + pCallbackData->pMessage + ">\n"; + if ( 0 < pCallbackData->queueLabelCount ) { - message += std::string("\t") + "Queue Labels:\n"; - for 
(uint8_t i = 0; i < pCallbackData->queueLabelCount; i++) + message += std::string( "\t" ) + "Queue Labels:\n"; + for ( uint8_t i = 0; i < pCallbackData->queueLabelCount; i++ ) { - message += std::string("\t\t") + "labelName = <" + pCallbackData->pQueueLabels[i].pLabelName + ">\n"; + message += std::string( "\t\t" ) + "labelName = <" + pCallbackData->pQueueLabels[i].pLabelName + ">\n"; } } - if (0 < pCallbackData->cmdBufLabelCount) + if ( 0 < pCallbackData->cmdBufLabelCount ) { - message += std::string("\t") + "CommandBuffer Labels:\n"; - for (uint8_t i = 0; i < pCallbackData->cmdBufLabelCount; i++) + message += std::string( "\t" ) + "CommandBuffer Labels:\n"; + for ( uint8_t i = 0; i < pCallbackData->cmdBufLabelCount; i++ ) { - message += std::string("\t\t") + "labelName = <" + pCallbackData->pCmdBufLabels[i].pLabelName + ">\n"; + message += std::string( "\t\t" ) + "labelName = <" + pCallbackData->pCmdBufLabels[i].pLabelName + ">\n"; } } - if (0 < pCallbackData->objectCount) + if ( 0 < pCallbackData->objectCount ) { - for (uint8_t i = 0; i < pCallbackData->objectCount; i++) + for ( uint8_t i = 0; i < pCallbackData->objectCount; i++ ) { - message += std::string("\t") + "Object " + std::to_string(i) + "\n"; - message += std::string("\t\t") + "objectType = " + vk::to_string(static_cast(pCallbackData->pObjects[i].objectType)) + "\n"; - message += std::string("\t\t") + "objectHandle = " + std::to_string(pCallbackData->pObjects[i].objectHandle) + "\n"; - if (pCallbackData->pObjects[i].pObjectName) + message += std::string( "\t" ) + "Object " + std::to_string( i ) + "\n"; + message += std::string( "\t\t" ) + "objectType = " + + vk::to_string( static_cast( pCallbackData->pObjects[i].objectType ) ) + "\n"; + message += + std::string( "\t\t" ) + "objectHandle = " + std::to_string( pCallbackData->pObjects[i].objectHandle ) + "\n"; + if ( pCallbackData->pObjects[i].pObjectName ) { - message += std::string("\t\t") + "objectName = <" + pCallbackData->pObjects[i].pObjectName + ">\n"; + message += std::string( "\t\t" ) + "objectName = <" + pCallbackData->pObjects[i].pObjectName + ">\n"; } + } } -} #ifdef _WIN32 - MessageBox(NULL, message.c_str(), "Alert", MB_OK); + MessageBox( NULL, message.c_str(), "Alert", MB_OK ); #else std::cout << message << std::endl; #endif @@ -86,24 +96,26 @@ VKAPI_ATTR VkBool32 VKAPI_CALL debugMessageFunc(VkDebugUtilsMessageSeverityFlagB return false; } -bool checkLayers(std::vector const& layers, std::vector const& properties) +bool checkLayers( std::vector const & layers, std::vector const & properties ) { // return true if all layers are listed in the properties - return std::all_of(layers.begin(), layers.end(), [&properties](char const* name) - { - return std::find_if(properties.begin(), properties.end(), [&name](vk::LayerProperties const& property) { return strcmp(property.layerName, name) == 0; }) != properties.end(); - }); + return std::all_of( layers.begin(), layers.end(), [&properties]( char const * name ) { + return std::find_if( properties.begin(), properties.end(), [&name]( vk::LayerProperties const & property ) { + return strcmp( property.layerName, name ) == 0; + } ) != properties.end(); + } ); } -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { -#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1) +#if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) // initialize the DipatchLoaderDynamic to use - static vk::DynamicLoader dl; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = dl.getProcAddress("vkGetInstanceProcAddr"); - 
VULKAN_HPP_DEFAULT_DISPATCHER.init(vkGetInstanceProcAddr); + static vk::DynamicLoader dl; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = + dl.getProcAddress( "vkGetInstanceProcAddr" ); + VULKAN_HPP_DEFAULT_DISPATCHER.init( vkGetInstanceProcAddr ); #endif std::vector instanceLayerProperties = vk::enumerateInstanceLayerProperties(); @@ -111,61 +123,77 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_KEY_START */ // Use standard_validation meta layer that enables all recommended validation layers - std::vector instanceLayerNames; - instanceLayerNames.push_back("VK_LAYER_KHRONOS_validation"); - if (!checkLayers(instanceLayerNames, instanceLayerProperties)) + std::vector instanceLayerNames; + instanceLayerNames.push_back( "VK_LAYER_KHRONOS_validation" ); + if ( !checkLayers( instanceLayerNames, instanceLayerProperties ) ) { std::cout << "Set the environment variable VK_LAYER_PATH to point to the location of your layers" << std::endl; - exit(1); + exit( 1 ); } /* Enable debug callback extension */ - std::vector instanceExtensionNames; - instanceExtensionNames.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME); + std::vector instanceExtensionNames; + instanceExtensionNames.push_back( VK_EXT_DEBUG_UTILS_EXTENSION_NAME ); - vk::ApplicationInfo applicationInfo(AppName, 1, EngineName, 1, VK_API_VERSION_1_1); - vk::InstanceCreateInfo instanceCreateInfo( vk::InstanceCreateFlags(), &applicationInfo, vk::su::checked_cast(instanceLayerNames.size()), instanceLayerNames.data(), - vk::su::checked_cast(instanceExtensionNames.size()) , instanceExtensionNames.data() ); - vk::UniqueInstance instance = vk::createInstanceUnique(instanceCreateInfo); + vk::ApplicationInfo applicationInfo( AppName, 1, EngineName, 1, VK_API_VERSION_1_1 ); + vk::InstanceCreateInfo instanceCreateInfo( vk::InstanceCreateFlags(), + &applicationInfo, + vk::su::checked_cast( instanceLayerNames.size() ), + instanceLayerNames.data(), + vk::su::checked_cast( instanceExtensionNames.size() ), + instanceExtensionNames.data() ); + vk::UniqueInstance instance = vk::createInstanceUnique( instanceCreateInfo ); -#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1) +#if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) // initialize function pointers for instance - VULKAN_HPP_DEFAULT_DISPATCHER.init(*instance); + VULKAN_HPP_DEFAULT_DISPATCHER.init( *instance ); #endif - pfnVkCreateDebugUtilsMessengerEXT = reinterpret_cast(instance->getProcAddr("vkCreateDebugUtilsMessengerEXT")); - if (!pfnVkCreateDebugUtilsMessengerEXT) + pfnVkCreateDebugUtilsMessengerEXT = + reinterpret_cast( instance->getProcAddr( "vkCreateDebugUtilsMessengerEXT" ) ); + if ( !pfnVkCreateDebugUtilsMessengerEXT ) { std::cout << "GetInstanceProcAddr: Unable to find pfnVkCreateDebugUtilsMessengerEXT function." << std::endl; - exit(1); + exit( 1 ); } - pfnVkDestroyDebugUtilsMessengerEXT = reinterpret_cast(instance->getProcAddr("vkDestroyDebugUtilsMessengerEXT")); - if (!pfnVkDestroyDebugUtilsMessengerEXT) + pfnVkDestroyDebugUtilsMessengerEXT = reinterpret_cast( + instance->getProcAddr( "vkDestroyDebugUtilsMessengerEXT" ) ); + if ( !pfnVkDestroyDebugUtilsMessengerEXT ) { std::cout << "GetInstanceProcAddr: Unable to find pfnVkDestroyDebugUtilsMessengerEXT function." 
<< std::endl; - exit(1); + exit( 1 ); } - vk::DebugUtilsMessageSeverityFlagsEXT severityFlags(vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError); - vk::DebugUtilsMessageTypeFlagsEXT messageTypeFlags(vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation); - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = instance->createDebugUtilsMessengerEXTUnique(vk::DebugUtilsMessengerCreateInfoEXT({}, severityFlags, messageTypeFlags, &debugMessageFunc)); + vk::DebugUtilsMessageSeverityFlagsEXT severityFlags( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | + vk::DebugUtilsMessageSeverityFlagBitsEXT::eError ); + vk::DebugUtilsMessageTypeFlagsEXT messageTypeFlags( vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | + vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance | + vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation ); + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = instance->createDebugUtilsMessengerEXTUnique( + vk::DebugUtilsMessengerCreateInfoEXT( {}, severityFlags, messageTypeFlags, &debugMessageFunc ) ); vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); std::vector queueFamilyProperties = physicalDevice.getQueueFamilyProperties(); - assert(!queueFamilyProperties.empty()); + assert( !queueFamilyProperties.empty() ); - auto qfpIt = std::find_if(queueFamilyProperties.begin(), queueFamilyProperties.end(), [](vk::QueueFamilyProperties const& qfp) { return !!(qfp.queueFlags & vk::QueueFlagBits::eGraphics); }); - assert(qfpIt != queueFamilyProperties.end()); - uint32_t queueFamilyIndex = static_cast(std::distance(queueFamilyProperties.begin(), qfpIt)); + auto qfpIt = std::find_if( + queueFamilyProperties.begin(), queueFamilyProperties.end(), []( vk::QueueFamilyProperties const & qfp ) { + return !!( qfp.queueFlags & vk::QueueFlagBits::eGraphics ); + } ); + assert( qfpIt != queueFamilyProperties.end() ); + uint32_t queueFamilyIndex = static_cast( std::distance( queueFamilyProperties.begin(), qfpIt ) ); - float queuePriority = 0.0f; - vk::DeviceQueueCreateInfo deviceQueueCreateInfo(vk::DeviceQueueCreateFlags(), queueFamilyIndex, 1, &queuePriority); - vk::UniqueDevice device = physicalDevice.createDeviceUnique(vk::DeviceCreateInfo(vk::DeviceCreateFlags(), 1, &deviceQueueCreateInfo)); + float queuePriority = 0.0f; + vk::DeviceQueueCreateInfo deviceQueueCreateInfo( + vk::DeviceQueueCreateFlags(), queueFamilyIndex, 1, &queuePriority ); + vk::UniqueDevice device = + physicalDevice.createDeviceUnique( vk::DeviceCreateInfo( vk::DeviceCreateFlags(), 1, &deviceQueueCreateInfo ) ); // Create a command pool (not a UniqueCommandPool, for testing purposes! - vk::CommandPool commandPool = device->createCommandPool(vk::CommandPoolCreateInfo(vk::CommandPoolCreateFlags(), queueFamilyIndex)); + vk::CommandPool commandPool = + device->createCommandPool( vk::CommandPoolCreateInfo( vk::CommandPoolCreateFlags(), queueFamilyIndex ) ); // The commandPool is not destroyed automatically (as it's not a UniqueCommandPool. // That is, the device is destroyed before the commmand pool and will trigger a validation error. 
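
The deliberately non-unique command pool above is what provokes the validation message this sample wants to demonstrate. As a sketch, not part of this patch, either of the following would silence it if that were not the point: destroy the raw handle explicitly before the device goes out of scope, or let RAII handle it by creating a unique handle in the first place.

// explicit cleanup of the raw handle ...
device->destroyCommandPool( commandPool );
// ... or a unique handle, as the other samples use
vk::UniqueCommandPool uniquePool =
  device->createCommandPoolUnique( vk::CommandPoolCreateInfo( vk::CommandPoolCreateFlags(), queueFamilyIndex ) );
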
@@ -174,20 +202,20 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/EnumerateDevicesAdvanced/EnumerateDevicesAdvanced.cpp b/samples/EnumerateDevicesAdvanced/EnumerateDevicesAdvanced.cpp index 396d2e8..87cd11f 100644 --- a/samples/EnumerateDevicesAdvanced/EnumerateDevicesAdvanced.cpp +++ b/samples/EnumerateDevicesAdvanced/EnumerateDevicesAdvanced.cpp @@ -17,19 +17,20 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "EnumerateDevicesAdvanced"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "EnumerateDevicesAdvanced"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif /* VULKAN_HPP_KEY_START */ @@ -37,41 +38,41 @@ int main(int /*argc*/, char ** /*argv*/) // enumerate the physicalDevices std::vector physicalDevices = instance->enumeratePhysicalDevices(); - for (auto const& physicalDevice : physicalDevices) + for ( auto const & physicalDevice : physicalDevices ) { vk::PhysicalDeviceProperties properties = physicalDevice.getProperties(); std::cout << "apiVersion: "; - std::cout << ((properties.apiVersion >> 22) & 0xfff) << '.'; // Major. - std::cout << ((properties.apiVersion >> 12) & 0x3ff) << '.'; // Minor. - std::cout << (properties.apiVersion & 0xfff); // Patch. + std::cout << ( ( properties.apiVersion >> 22 ) & 0xfff ) << '.'; // Major. + std::cout << ( ( properties.apiVersion >> 12 ) & 0x3ff ) << '.'; // Minor. + std::cout << ( properties.apiVersion & 0xfff ); // Patch. 
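
The shift-and-mask decoding in the loop above matches the macros vulkan_core.h already provides. A short sketch, not part of this patch, printing the same decomposition inside that loop:

// equivalent to the manual shifts: VK_VERSION_MAJOR is (v >> 22), VK_VERSION_MINOR is (v >> 12) & 0x3ff,
// VK_VERSION_PATCH is v & 0xfff
uint32_t v = properties.apiVersion;
std::cout << "apiVersion: " << VK_VERSION_MAJOR( v ) << '.' << VK_VERSION_MINOR( v ) << '.' << VK_VERSION_PATCH( v )
          << '\n';
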
std::cout << '\n'; std::cout << "driverVersion: " << properties.driverVersion << '\n'; - std::cout << std::showbase << std::internal << std::setfill('0') << std::hex; - std::cout << "vendorId: " << std::setw(6) << properties.vendorID << '\n'; - std::cout << "deviceId: " << std::setw(6) << properties.deviceID << '\n'; - std::cout << std::noshowbase << std::right << std::setfill(' ') << std::dec; + std::cout << std::showbase << std::internal << std::setfill( '0' ) << std::hex; + std::cout << "vendorId: " << std::setw( 6 ) << properties.vendorID << '\n'; + std::cout << "deviceId: " << std::setw( 6 ) << properties.deviceID << '\n'; + std::cout << std::noshowbase << std::right << std::setfill( ' ' ) << std::dec; - std::cout << "deviceType: " << vk::to_string(properties.deviceType) << "\n"; + std::cout << "deviceType: " << vk::to_string( properties.deviceType ) << "\n"; std::cout << "deviceName: " << properties.deviceName << '\n'; - std::cout << "pipelineCacheUUID: " << vk::su::UUID(properties.pipelineCacheUUID) << "\n\n"; + std::cout << "pipelineCacheUUID: " << vk::su::UUID( properties.pipelineCacheUUID ) << "\n\n"; } /* VULKAN_HPP_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/Events/Events.cpp b/samples/Events/Events.cpp index fb8cb05..e7a5dce 100644 --- a/samples/Events/Events.cpp +++ b/samples/Events/Events.cpp @@ -17,135 +17,145 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "Events"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "Events"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - uint32_t graphicsQueueFamilyIndex = vk::su::findGraphicsQueueFamilyIndex(physicalDevice.getQueueFamilyProperties()); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsQueueFamilyIndex); + uint32_t graphicsQueueFamilyIndex = + vk::su::findGraphicsQueueFamilyIndex( physicalDevice.getQueueFamilyProperties() ); + vk::UniqueDevice device = vk::su::createDevice( physicalDevice, graphicsQueueFamilyIndex ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsQueueFamilyIndex); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsQueueFamilyIndex ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = 
device->getQueue(graphicsQueueFamilyIndex, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsQueueFamilyIndex, 0 ); /* VULKAN_KEY_START */ // Start with a trivial command buffer and make sure fence wait doesn't time out - commandBuffer->begin(vk::CommandBufferBeginInfo(vk::CommandBufferUsageFlags())); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, 10.0f, 10.0f, 0.0f, 1.0f)); + commandBuffer->begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlags() ) ); + commandBuffer->setViewport( 0, vk::Viewport( 0.0f, 0.0f, 10.0f, 10.0f, 0.0f, 1.0f ) ); commandBuffer->end(); - vk::UniqueFence fence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence fence = device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - vk::SubmitInfo submitInfo(0, nullptr, &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, fence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 0, nullptr, &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, fence.get() ); // Make sure timeout is long enough for a simple command buffer without waiting for an event vk::Result result; - int timeouts = -1; + int timeouts = -1; do { - result = device->waitForFences(fence.get(), true, vk::su::FenceTimeout); + result = device->waitForFences( fence.get(), true, vk::su::FenceTimeout ); timeouts++; - } while (result == vk::Result::eTimeout); - assert(result == vk::Result::eSuccess); - if (timeouts != 0) + } while ( result == vk::Result::eTimeout ); + assert( result == vk::Result::eSuccess ); + if ( timeouts != 0 ) { std::cout << "Unsuitable timeout value, exiting\n"; - exit(-1); + exit( -1 ); } // Now create an event and wait for it on the GPU - vk::UniqueEvent event = device->createEventUnique(vk::EventCreateInfo(vk::EventCreateFlags())); + vk::UniqueEvent event = device->createEventUnique( vk::EventCreateInfo( vk::EventCreateFlags() ) ); - commandBuffer->reset(vk::CommandBufferResetFlags()); - commandBuffer->begin(vk::CommandBufferBeginInfo()); - commandBuffer->waitEvents(event.get(), vk::PipelineStageFlagBits::eHost, vk::PipelineStageFlagBits::eBottomOfPipe, nullptr, nullptr, nullptr); + commandBuffer->reset( vk::CommandBufferResetFlags() ); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + commandBuffer->waitEvents( event.get(), + vk::PipelineStageFlagBits::eHost, + vk::PipelineStageFlagBits::eBottomOfPipe, + nullptr, + nullptr, + nullptr ); commandBuffer->end(); - device->resetFences(fence.get()); + device->resetFences( fence.get() ); // Note that stepping through this code in the debugger is a bad idea because the GPU can TDR waiting for the event. 
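
A minimal sketch, not part of this patch, of the fence-wait loop this sample repeats several times below; the helper name waitForFence is made up, and vk::su::FenceTimeout comes from the samples' utility header.

void waitForFence( vk::UniqueDevice const & device, vk::Fence fence )
{
  // keep waiting while the timeout elapses; anything other than eSuccess afterwards is unexpected
  vk::Result result;
  do
  {
    result = device->waitForFences( fence, true, vk::su::FenceTimeout );
  } while ( result == vk::Result::eTimeout );
  assert( result == vk::Result::eSuccess );
}
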
// Execute the code from vk::Queue::submit() through vk::Device::setEvent() without breakpoints waitDestinationStageMask = vk::PipelineStageFlagBits::eBottomOfPipe; - graphicsQueue.submit(submitInfo, fence.get()); + graphicsQueue.submit( submitInfo, fence.get() ); // We should timeout waiting for the fence because the GPU should be waiting on the event - result = device->waitForFences(fence.get(), true, vk::su::FenceTimeout); - if (result != vk::Result::eTimeout) + result = device->waitForFences( fence.get(), true, vk::su::FenceTimeout ); + if ( result != vk::Result::eTimeout ) { std::cout << "Didn't get expected timeout in vk::Device::waitForFences, exiting\n"; - exit(-1); + exit( -1 ); } // Set the event from the CPU and wait for the fence. // This should succeed since we set the event - device->setEvent(event.get()); + device->setEvent( event.get() ); do { - result = device->waitForFences(fence.get(), true, vk::su::FenceTimeout); - } while (result == vk::Result::eTimeout); - assert(result == vk::Result::eSuccess); + result = device->waitForFences( fence.get(), true, vk::su::FenceTimeout ); + } while ( result == vk::Result::eTimeout ); + assert( result == vk::Result::eSuccess ); - commandBuffer->reset({}); - device->resetFences(fence.get()); - device->resetEvent(event.get()); + commandBuffer->reset( {} ); + device->resetFences( fence.get() ); + device->resetEvent( event.get() ); // Now set the event from the GPU and wait on the CPU - commandBuffer->begin(vk::CommandBufferBeginInfo()); - commandBuffer->setEvent(event.get(), vk::PipelineStageFlagBits::eBottomOfPipe); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + commandBuffer->setEvent( event.get(), vk::PipelineStageFlagBits::eBottomOfPipe ); commandBuffer->end(); // Look for the event on the CPU. It should be vk::Result::eEventReset since we haven't sent the command buffer yet. - result = device->getEventStatus(event.get()); - assert(result == vk::Result::eEventReset); + result = device->getEventStatus( event.get() ); + assert( result == vk::Result::eEventReset ); // Send the command buffer and loop waiting for the event - graphicsQueue.submit(submitInfo, fence.get()); + graphicsQueue.submit( submitInfo, fence.get() ); int polls = 0; do { - result = device->getEventStatus(event.get()); + result = device->getEventStatus( event.get() ); polls++; - } while (result != vk::Result::eEventSet); - printf("%d polls to find the event set\n", polls); + } while ( result != vk::Result::eEventSet ); + printf( "%d polls to find the event set\n", polls ); do { - result = device->waitForFences(fence.get(), true, vk::su::FenceTimeout); - } while (result == vk::Result::eTimeout); - assert(result == vk::Result::eSuccess); + result = device->waitForFences( fence.get(), true, vk::su::FenceTimeout ); + } while ( result == vk::Result::eTimeout ); + assert( result == vk::Result::eSuccess ); /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/ImmutableSampler/ImmutableSampler.cpp b/samples/ImmutableSampler/ImmutableSampler.cpp index cce2286..082573d 100644 --- a/samples/ImmutableSampler/ImmutableSampler.cpp +++ b/samples/ImmutableSampler/ImmutableSampler.cpp @@ -19,142 +19,194 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "ImmutableSampler"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "ImmutableSampler"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + 
vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(surfaceData.extent)); + vk::su::BufferData uniformBufferData( + physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( + device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( surfaceData.extent ) ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + depthBufferData.format ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent ); - vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(texturedCubeData), vk::BufferUsageFlagBits::eVertexBuffer); - vk::su::copyToDevice(device, vertexBufferData.deviceMemory, texturedCubeData, sizeof(texturedCubeData) / sizeof(texturedCubeData[0])); + vk::su::BufferData vertexBufferData( + physicalDevice, device, sizeof( texturedCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ); + vk::su::copyToDevice( device, + vertexBufferData.deviceMemory, + texturedCubeData, + sizeof( texturedCubeData ) / sizeof( texturedCubeData[0] ) ); /* VULKAN_KEY_START */ - vk::su::TextureData textureData(physicalDevice, device); + vk::su::TextureData textureData( physicalDevice, device ); - commandBuffer->begin(vk::CommandBufferBeginInfo()); - textureData.setImage(device, commandBuffer, vk::su::CheckerboardImageGenerator()); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + textureData.setImage( device, commandBuffer, vk::su::CheckerboardImageGenerator() ); - vk::DescriptorSetLayoutBinding bindings[2] = - { - vk::DescriptorSetLayoutBinding(0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex), - vk::DescriptorSetLayoutBinding(1, vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment, 
&textureData.textureSampler.get()) + vk::DescriptorSetLayoutBinding bindings[2] = { + vk::DescriptorSetLayoutBinding( 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex ), + vk::DescriptorSetLayoutBinding( 1, + vk::DescriptorType::eCombinedImageSampler, + 1, + vk::ShaderStageFlagBits::eFragment, + &textureData.textureSampler.get() ) }; - vk::UniqueDescriptorSetLayout descriptorSetLayout = device->createDescriptorSetLayoutUnique(vk::DescriptorSetLayoutCreateInfo(vk::DescriptorSetLayoutCreateFlags(), 2, bindings)); + vk::UniqueDescriptorSetLayout descriptorSetLayout = device->createDescriptorSetLayoutUnique( + vk::DescriptorSetLayoutCreateInfo( vk::DescriptorSetLayoutCreateFlags(), 2, bindings ) ); // Create pipeline layout - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) ); // Create a single pool to contain data for our descriptor set - vk::DescriptorPoolSize poolSizes[2] = - { - vk::DescriptorPoolSize(vk::DescriptorType::eUniformBuffer, 1), - vk::DescriptorPoolSize(vk::DescriptorType::eCombinedImageSampler, 1) - }; - vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique(vk::DescriptorPoolCreateInfo(vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, 2, poolSizes)); + vk::DescriptorPoolSize poolSizes[2] = { vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 1 ), + vk::DescriptorPoolSize( vk::DescriptorType::eCombinedImageSampler, 1 ) }; + vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique( + vk::DescriptorPoolCreateInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, 2, poolSizes ) ); // Populate descriptor sets - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front()); + vk::UniqueDescriptorSet descriptorSet = std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); - vk::DescriptorBufferInfo bufferInfo(uniformBufferData.buffer.get(), 0, sizeof(glm::mat4x4)); - vk::DescriptorImageInfo imageInfo(textureData.textureSampler.get(), textureData.imageData->imageView.get(), vk::ImageLayout::eShaderReadOnlyOptimal); - vk::WriteDescriptorSet writeDescriptorSets[2] = - { - vk::WriteDescriptorSet(descriptorSet.get(), 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo), - vk::WriteDescriptorSet(descriptorSet.get(), 1, 0, 1, vk::DescriptorType::eCombinedImageSampler, &imageInfo) + vk::DescriptorBufferInfo bufferInfo( uniformBufferData.buffer.get(), 0, sizeof( glm::mat4x4 ) ); + vk::DescriptorImageInfo imageInfo( textureData.textureSampler.get(), + textureData.imageData->imageView.get(), + vk::ImageLayout::eShaderReadOnlyOptimal ); + vk::WriteDescriptorSet writeDescriptorSets[2] = { + vk::WriteDescriptorSet( descriptorSet.get(), 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo ), + vk::WriteDescriptorSet( descriptorSet.get(), 1, 0, 1, vk::DescriptorType::eCombinedImageSampler, &imageInfo ) }; - device->updateDescriptorSets(vk::ArrayProxy(2, writeDescriptorSets), nullptr); + device->updateDescriptorSets( vk::ArrayProxy( 2, writeDescriptorSets ), nullptr ); /* VULKAN_KEY_END */ - 
vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr), - sizeof(texturedCubeData[0]), { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, - vk::FrontFace::eClockwise, true, pipelineLayout, renderPass); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() ); + vk::UniquePipeline graphicsPipeline = + vk::su::createGraphicsPipeline( device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + sizeof( texturedCubeData[0] ), + { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, + vk::FrontFace::eClockwise, + true, + pipelineLayout, + renderPass ); - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); + vk::ResultValue currentBuffer = device->acquireNextImageKHR( + swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); vk::ClearValue clearValues[2]; - clearValues[0].color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); - vk::RenderPassBeginInfo renderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 2, clearValues); - commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eInline); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get()); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); + vk::RenderPassBeginInfo renderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), + 2, + clearValues ); + commandBuffer->beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline ); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr ); - commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0}); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast(surfaceData.extent.width), static_cast(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent)); + commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } ); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast( surfaceData.extent.width ), + static_cast( surfaceData.extent.height 
), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); - commandBuffer->draw(12 * 3, 1, 0, 0); + commandBuffer->draw( 12 * 3, 1, 0, 0 ); commandBuffer->endRenderPass(); commandBuffer->end(); - vk::su::submitAndWait(device, graphicsQueue, commandBuffer); + vk::su::submitAndWait( device, graphicsQueue, commandBuffer ); - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); device->waitIdle(); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/InitTexture/InitTexture.cpp b/samples/InitTexture/InitTexture.cpp index fa6f4a1..c6f52f7 100644 --- a/samples/InitTexture/InitTexture.cpp +++ b/samples/InitTexture/InitTexture.cpp @@ -19,141 +19,203 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include -static char const* AppName = "InitTexture"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "InitTexture"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(50, 50)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 50, 50 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, 
graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); /* VULKAN_KEY_START */ - vk::Format format = vk::Format::eR8G8B8A8Unorm; - vk::FormatProperties formatProperties = physicalDevice.getFormatProperties(format); + vk::Format format = vk::Format::eR8G8B8A8Unorm; + vk::FormatProperties formatProperties = physicalDevice.getFormatProperties( format ); // See if we can use a linear tiled image for a texture, if not, we will need a staging buffer for the texture data - bool needsStaging = !(formatProperties.linearTilingFeatures & vk::FormatFeatureFlagBits::eSampledImage); + bool needsStaging = !( formatProperties.linearTilingFeatures & vk::FormatFeatureFlagBits::eSampledImage ); - vk::UniqueImage image = device->createImageUnique(vk::ImageCreateInfo(vk::ImageCreateFlags(), vk::ImageType::e2D, format, vk::Extent3D(surfaceData.extent, 1), 1, 1, - vk::SampleCountFlagBits::e1, needsStaging ? vk::ImageTiling::eOptimal : vk::ImageTiling::eLinear, - vk::ImageUsageFlagBits::eSampled | (needsStaging ? vk::ImageUsageFlagBits::eTransferDst : vk::ImageUsageFlagBits()), - vk::SharingMode::eExclusive, 0, nullptr, needsStaging ? vk::ImageLayout::eUndefined : vk::ImageLayout::ePreinitialized)); + vk::UniqueImage image = device->createImageUnique( + vk::ImageCreateInfo( vk::ImageCreateFlags(), + vk::ImageType::e2D, + format, + vk::Extent3D( surfaceData.extent, 1 ), + 1, + 1, + vk::SampleCountFlagBits::e1, + needsStaging ? vk::ImageTiling::eOptimal : vk::ImageTiling::eLinear, + vk::ImageUsageFlagBits::eSampled | + ( needsStaging ? vk::ImageUsageFlagBits::eTransferDst : vk::ImageUsageFlagBits() ), + vk::SharingMode::eExclusive, + 0, + nullptr, + needsStaging ? vk::ImageLayout::eUndefined : vk::ImageLayout::ePreinitialized ) ); - vk::MemoryRequirements memoryRequirements = device->getImageMemoryRequirements(image.get()); - uint32_t memoryTypeIndex = vk::su::findMemoryType(physicalDevice.getMemoryProperties(), memoryRequirements.memoryTypeBits, - needsStaging ? vk::MemoryPropertyFlags() : (vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent)); + vk::MemoryRequirements memoryRequirements = device->getImageMemoryRequirements( image.get() ); + uint32_t memoryTypeIndex = vk::su::findMemoryType( + physicalDevice.getMemoryProperties(), + memoryRequirements.memoryTypeBits, + needsStaging ? 
vk::MemoryPropertyFlags() + : ( vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent ) ); // allocate memory - vk::UniqueDeviceMemory imageMemory = device->allocateMemoryUnique(vk::MemoryAllocateInfo(memoryRequirements.size, memoryTypeIndex)); + vk::UniqueDeviceMemory imageMemory = + device->allocateMemoryUnique( vk::MemoryAllocateInfo( memoryRequirements.size, memoryTypeIndex ) ); // bind memory - device->bindImageMemory(image.get(), imageMemory.get(), 0); + device->bindImageMemory( image.get(), imageMemory.get(), 0 ); - vk::UniqueBuffer textureBuffer; + vk::UniqueBuffer textureBuffer; vk::UniqueDeviceMemory textureBufferMemory; - if (needsStaging) + if ( needsStaging ) { // Need a staging buffer to map and copy texture into - textureBuffer = device->createBufferUnique(vk::BufferCreateInfo(vk::BufferCreateFlags(), surfaceData.extent.width * surfaceData.extent.height * 4, vk::BufferUsageFlagBits::eTransferSrc)); + textureBuffer = + device->createBufferUnique( vk::BufferCreateInfo( vk::BufferCreateFlags(), + surfaceData.extent.width * surfaceData.extent.height * 4, + vk::BufferUsageFlagBits::eTransferSrc ) ); - memoryRequirements = device->getBufferMemoryRequirements(textureBuffer.get()); - memoryTypeIndex = vk::su::findMemoryType(physicalDevice.getMemoryProperties(), memoryRequirements.memoryTypeBits, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); + memoryRequirements = device->getBufferMemoryRequirements( textureBuffer.get() ); + memoryTypeIndex = + vk::su::findMemoryType( physicalDevice.getMemoryProperties(), + memoryRequirements.memoryTypeBits, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent ); // allocate memory - textureBufferMemory = device->allocateMemoryUnique(vk::MemoryAllocateInfo(memoryRequirements.size, memoryTypeIndex)); + textureBufferMemory = + device->allocateMemoryUnique( vk::MemoryAllocateInfo( memoryRequirements.size, memoryTypeIndex ) ); // bind memory - device->bindBufferMemory(textureBuffer.get(), textureBufferMemory.get(), 0); + device->bindBufferMemory( textureBuffer.get(), textureBufferMemory.get(), 0 ); } else { - vk::SubresourceLayout subresourceLayout = device->getImageSubresourceLayout(image.get(), vk::ImageSubresource(vk::ImageAspectFlagBits::eColor)); + vk::SubresourceLayout subresourceLayout = + device->getImageSubresourceLayout( image.get(), vk::ImageSubresource( vk::ImageAspectFlagBits::eColor ) ); } - void* data = device->mapMemory(needsStaging ? textureBufferMemory.get() : imageMemory.get(), 0, memoryRequirements.size, vk::MemoryMapFlags()); + void * data = device->mapMemory( + needsStaging ? 
textureBufferMemory.get() : imageMemory.get(), 0, memoryRequirements.size, vk::MemoryMapFlags() ); // Checkerboard of 16x16 pixel squares - unsigned char *pImageMemory = static_cast(data); - for (uint32_t row = 0; row < surfaceData.extent.height; row++) + unsigned char * pImageMemory = static_cast( data ); + for ( uint32_t row = 0; row < surfaceData.extent.height; row++ ) { - for (uint32_t col = 0; col < surfaceData.extent.width; col++) + for ( uint32_t col = 0; col < surfaceData.extent.width; col++ ) { - unsigned char rgb = (((row & 0x10) == 0) ^ ((col & 0x10) == 0)) * 255; - pImageMemory[0] = rgb; - pImageMemory[1] = rgb; - pImageMemory[2] = rgb; - pImageMemory[3] = 255; + unsigned char rgb = ( ( ( row & 0x10 ) == 0 ) ^ ( ( col & 0x10 ) == 0 ) ) * 255; + pImageMemory[0] = rgb; + pImageMemory[1] = rgb; + pImageMemory[2] = rgb; + pImageMemory[3] = 255; pImageMemory += 4; } } - device->unmapMemory(needsStaging ? textureBufferMemory.get() : imageMemory.get()); + device->unmapMemory( needsStaging ? textureBufferMemory.get() : imageMemory.get() ); - commandBuffer->begin(vk::CommandBufferBeginInfo()); - if (needsStaging) + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + if ( needsStaging ) { // Since we're going to blit to the texture image, set its layout to eTransferDstOptimal - vk::su::setImageLayout(commandBuffer, image.get(), format, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal); - vk::BufferImageCopy copyRegion(0, surfaceData.extent.width, surfaceData.extent.height, vk::ImageSubresourceLayers(vk::ImageAspectFlagBits::eColor, 0, 0, 1), vk::Offset3D(0, 0, 0), vk::Extent3D(surfaceData.extent, 1)); - commandBuffer->copyBufferToImage(textureBuffer.get(), image.get(), vk::ImageLayout::eTransferDstOptimal, copyRegion); + vk::su::setImageLayout( + commandBuffer, image.get(), format, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal ); + vk::BufferImageCopy copyRegion( 0, + surfaceData.extent.width, + surfaceData.extent.height, + vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ), + vk::Offset3D( 0, 0, 0 ), + vk::Extent3D( surfaceData.extent, 1 ) ); + commandBuffer->copyBufferToImage( + textureBuffer.get(), image.get(), vk::ImageLayout::eTransferDstOptimal, copyRegion ); // Set the layout for the texture image from eTransferDstOptimal to SHADER_READ_ONLY - vk::su::setImageLayout(commandBuffer, image.get(), format, vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal); + vk::su::setImageLayout( commandBuffer, + image.get(), + format, + vk::ImageLayout::eTransferDstOptimal, + vk::ImageLayout::eShaderReadOnlyOptimal ); } else { // If we can use the linear tiled image as a texture, just do it - vk::su::setImageLayout(commandBuffer, image.get(), format, vk::ImageLayout::ePreinitialized, vk::ImageLayout::eShaderReadOnlyOptimal); + vk::su::setImageLayout( + commandBuffer, image.get(), format, vk::ImageLayout::ePreinitialized, vk::ImageLayout::eShaderReadOnlyOptimal ); } commandBuffer->end(); - vk::su::submitAndWait(device, graphicsQueue, commandBuffer); + vk::su::submitAndWait( device, graphicsQueue, commandBuffer ); - vk::UniqueSampler sampler = device->createSamplerUnique(vk::SamplerCreateInfo(vk::SamplerCreateFlags(), vk::Filter::eNearest, vk::Filter::eNearest, vk::SamplerMipmapMode::eNearest, - vk::SamplerAddressMode::eClampToEdge, vk::SamplerAddressMode::eClampToEdge, vk::SamplerAddressMode::eClampToEdge, 0.0f, false, 1.0f, false, vk::CompareOp::eNever, 0.0f, 0.0f, - vk::BorderColor::eFloatOpaqueWhite)); + 
vk::UniqueSampler sampler = + device->createSamplerUnique( vk::SamplerCreateInfo( vk::SamplerCreateFlags(), + vk::Filter::eNearest, + vk::Filter::eNearest, + vk::SamplerMipmapMode::eNearest, + vk::SamplerAddressMode::eClampToEdge, + vk::SamplerAddressMode::eClampToEdge, + vk::SamplerAddressMode::eClampToEdge, + 0.0f, + false, + 1.0f, + false, + vk::CompareOp::eNever, + 0.0f, + 0.0f, + vk::BorderColor::eFloatOpaqueWhite ) ); - vk::ComponentMapping componentMapping(vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eG, vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eA); - vk::ImageViewCreateInfo imageViewCreateInfo(vk::ImageViewCreateFlags(), image.get(), vk::ImageViewType::e2D, format, componentMapping, vk::ImageSubresourceRange(vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1)); - vk::UniqueImageView imageView = device->createImageViewUnique(imageViewCreateInfo); + vk::ComponentMapping componentMapping( + vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eG, vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eA ); + vk::ImageViewCreateInfo imageViewCreateInfo( + vk::ImageViewCreateFlags(), + image.get(), + vk::ImageViewType::e2D, + format, + componentMapping, + vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) ); + vk::UniqueImageView imageView = device->createImageViewUnique( imageViewCreateInfo ); /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/InputAttachment/InputAttachment.cpp b/samples/InputAttachment/InputAttachment.cpp index 99eaf9f..eed246e 100644 --- a/samples/InputAttachment/InputAttachment.cpp +++ b/samples/InputAttachment/InputAttachment.cpp @@ -19,13 +19,14 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "InputAttachment"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "InputAttachment"; +static char const * EngineName = "Vulkan.hpp"; static std::string vertexShaderText = R"( #version 450 @@ -43,7 +44,7 @@ void main() )"; // Use subpassLoad to read from input attachment -static const char *fragmentShaderText = R"( +static const char * fragmentShaderText = R"( #version 450 layout (input_attachment_index = 0, set = 0, binding = 0) uniform subpassInput inputAttachment; @@ -55,147 +56,239 @@ void main() outColor = subpassLoad(inputAttachment); } )"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::FormatProperties formatProperties 
= physicalDevice.getFormatProperties(vk::Format::eR8G8B8A8Unorm); - if (!(formatProperties.optimalTilingFeatures & vk::FormatFeatureFlagBits::eColorAttachment)) + vk::FormatProperties formatProperties = physicalDevice.getFormatProperties( vk::Format::eR8G8B8A8Unorm ); + if ( !( formatProperties.optimalTilingFeatures & vk::FormatFeatureFlagBits::eColorAttachment ) ) { std::cout << "vk::Format::eR8G8B8A8Unorm format unsupported for input attachment\n"; - exit(-1); + exit( -1 ); } - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); /* VULKAN_KEY_START */ - // Create a framebuffer with 2 attachments, one the color attachment the shaders render into, and the other an input attachment which - // will be cleared to yellow, and then used by the shaders to color the drawn triangle. Final result should be a yellow triangle + // Create a framebuffer with 2 attachments, one the color attachment the shaders render into, and the other an input + // attachment which will be cleared to yellow, and then used by the shaders to color the drawn triangle. 
Final + // result should be a yellow triangle // Create the image that will be used as the input attachment // The image for the color attachment is the presentable image already created as part of the SwapChainData - vk::UniqueImage inputImage = device->createImageUnique(vk::ImageCreateInfo(vk::ImageCreateFlags(), vk::ImageType::e2D, swapChainData.colorFormat, vk::Extent3D(surfaceData.extent, 1), 1, 1, - vk::SampleCountFlagBits::e1, vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eInputAttachment | vk::ImageUsageFlagBits::eTransferDst)); + vk::UniqueImage inputImage = device->createImageUnique( + vk::ImageCreateInfo( vk::ImageCreateFlags(), + vk::ImageType::e2D, + swapChainData.colorFormat, + vk::Extent3D( surfaceData.extent, 1 ), + 1, + 1, + vk::SampleCountFlagBits::e1, + vk::ImageTiling::eOptimal, + vk::ImageUsageFlagBits::eInputAttachment | vk::ImageUsageFlagBits::eTransferDst ) ); - vk::MemoryRequirements memoryRequirements = device->getImageMemoryRequirements(inputImage.get()); - uint32_t memoryTypeIndex = vk::su::findMemoryType(physicalDevice.getMemoryProperties(), memoryRequirements.memoryTypeBits, vk::MemoryPropertyFlags()); - vk::UniqueDeviceMemory inputMemory = device->allocateMemoryUnique(vk::MemoryAllocateInfo(memoryRequirements.size, memoryTypeIndex)); - device->bindImageMemory(inputImage.get(), inputMemory.get(), 0); + vk::MemoryRequirements memoryRequirements = device->getImageMemoryRequirements( inputImage.get() ); + uint32_t memoryTypeIndex = vk::su::findMemoryType( + physicalDevice.getMemoryProperties(), memoryRequirements.memoryTypeBits, vk::MemoryPropertyFlags() ); + vk::UniqueDeviceMemory inputMemory = + device->allocateMemoryUnique( vk::MemoryAllocateInfo( memoryRequirements.size, memoryTypeIndex ) ); + device->bindImageMemory( inputImage.get(), inputMemory.get(), 0 ); // Set the image layout to TRANSFER_DST_OPTIMAL to be ready for clear - commandBuffer->begin(vk::CommandBufferBeginInfo()); - vk::su::setImageLayout(commandBuffer, inputImage.get(), swapChainData.colorFormat, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + vk::su::setImageLayout( commandBuffer, + inputImage.get(), + swapChainData.colorFormat, + vk::ImageLayout::eUndefined, + vk::ImageLayout::eTransferDstOptimal ); - commandBuffer->clearColorImage(inputImage.get(), vk::ImageLayout::eTransferDstOptimal, vk::ClearColorValue(std::array({ {1.0f, 1.0f, 0.0f, 0.0f} })), - vk::ImageSubresourceRange(vk::ImageAspectFlagBits::eColor, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS)); + commandBuffer->clearColorImage( + inputImage.get(), + vk::ImageLayout::eTransferDstOptimal, + vk::ClearColorValue( std::array( { { 1.0f, 1.0f, 0.0f, 0.0f } } ) ), + vk::ImageSubresourceRange( + vk::ImageAspectFlagBits::eColor, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS ) ); // Set the image layout to SHADER_READONLY_OPTIMAL for use by the shaders - vk::su::setImageLayout(commandBuffer, inputImage.get(), swapChainData.colorFormat, vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal); + vk::su::setImageLayout( commandBuffer, + inputImage.get(), + swapChainData.colorFormat, + vk::ImageLayout::eTransferDstOptimal, + vk::ImageLayout::eShaderReadOnlyOptimal ); - vk::ComponentMapping componentMapping(vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eG, vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eA); - vk::ImageViewCreateInfo imageViewCreateInfo(vk::ImageViewCreateFlags(), inputImage.get(), 
vk::ImageViewType::e2D, swapChainData.colorFormat, componentMapping, vk::ImageSubresourceRange(vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1)); - vk::UniqueImageView inputAttachmentView = device->createImageViewUnique(imageViewCreateInfo); + vk::ComponentMapping componentMapping( + vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eG, vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eA ); + vk::ImageViewCreateInfo imageViewCreateInfo( + vk::ImageViewCreateFlags(), + inputImage.get(), + vk::ImageViewType::e2D, + swapChainData.colorFormat, + componentMapping, + vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) ); + vk::UniqueImageView inputAttachmentView = device->createImageViewUnique( imageViewCreateInfo ); - vk::DescriptorSetLayoutBinding layoutBinding(0, vk::DescriptorType::eInputAttachment, 1, vk::ShaderStageFlagBits::eFragment); - vk::UniqueDescriptorSetLayout descriptorSetLayout = device->createDescriptorSetLayoutUnique(vk::DescriptorSetLayoutCreateInfo(vk::DescriptorSetLayoutCreateFlags(), 1, &layoutBinding)); + vk::DescriptorSetLayoutBinding layoutBinding( + 0, vk::DescriptorType::eInputAttachment, 1, vk::ShaderStageFlagBits::eFragment ); + vk::UniqueDescriptorSetLayout descriptorSetLayout = device->createDescriptorSetLayoutUnique( + vk::DescriptorSetLayoutCreateInfo( vk::DescriptorSetLayoutCreateFlags(), 1, &layoutBinding ) ); - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) ); - vk::AttachmentDescription attachments[2] = - { - // First attachment is the color attachment - clear at the beginning of the renderpass and transition layout to PRESENT_SRC_KHR at the end of renderpass - vk::AttachmentDescription(vk::AttachmentDescriptionFlags(), swapChainData.colorFormat, vk::SampleCountFlagBits::e1, vk::AttachmentLoadOp::eClear, vk::AttachmentStoreOp::eStore, - vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare, vk::ImageLayout::eUndefined, vk::ImageLayout::ePresentSrcKHR), + vk::AttachmentDescription attachments[2] = { + // First attachment is the color attachment - clear at the beginning of the renderpass and transition layout to + // PRESENT_SRC_KHR at the end of renderpass + vk::AttachmentDescription( vk::AttachmentDescriptionFlags(), + swapChainData.colorFormat, + vk::SampleCountFlagBits::e1, + vk::AttachmentLoadOp::eClear, + vk::AttachmentStoreOp::eStore, + vk::AttachmentLoadOp::eDontCare, + vk::AttachmentStoreOp::eDontCare, + vk::ImageLayout::eUndefined, + vk::ImageLayout::ePresentSrcKHR ), // Second attachment is input attachment. Once cleared it should have width*height yellow pixels. 
- // Doing a subpassLoad in the fragment shader should give the shader the color at the fragments x,y location from the input attachment - vk::AttachmentDescription(vk::AttachmentDescriptionFlags(), swapChainData.colorFormat, vk::SampleCountFlagBits::e1, vk::AttachmentLoadOp::eLoad, vk::AttachmentStoreOp::eDontCare, - vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare, vk::ImageLayout::eShaderReadOnlyOptimal, vk::ImageLayout::eShaderReadOnlyOptimal) + // Doing a subpassLoad in the fragment shader should give the shader the color at the fragments x,y location from + // the input attachment + vk::AttachmentDescription( vk::AttachmentDescriptionFlags(), + swapChainData.colorFormat, + vk::SampleCountFlagBits::e1, + vk::AttachmentLoadOp::eLoad, + vk::AttachmentStoreOp::eDontCare, + vk::AttachmentLoadOp::eDontCare, + vk::AttachmentStoreOp::eDontCare, + vk::ImageLayout::eShaderReadOnlyOptimal, + vk::ImageLayout::eShaderReadOnlyOptimal ) }; - vk::AttachmentReference colorReference(0, vk::ImageLayout::eColorAttachmentOptimal); - vk::AttachmentReference inputReference(1, vk::ImageLayout::eShaderReadOnlyOptimal); - vk::SubpassDescription subPass(vk::SubpassDescriptionFlags(), vk::PipelineBindPoint::eGraphics, 1, &inputReference, 1, &colorReference); - vk::UniqueRenderPass renderPass = device->createRenderPassUnique(vk::RenderPassCreateInfo(vk::RenderPassCreateFlags(), 2, attachments, 1, &subPass)); + vk::AttachmentReference colorReference( 0, vk::ImageLayout::eColorAttachmentOptimal ); + vk::AttachmentReference inputReference( 1, vk::ImageLayout::eShaderReadOnlyOptimal ); + vk::SubpassDescription subPass( + vk::SubpassDescriptionFlags(), vk::PipelineBindPoint::eGraphics, 1, &inputReference, 1, &colorReference ); + vk::UniqueRenderPass renderPass = device->createRenderPassUnique( + vk::RenderPassCreateInfo( vk::RenderPassCreateFlags(), 2, attachments, 1, &subPass ) ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, inputAttachmentView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, inputAttachmentView, surfaceData.extent ); - vk::DescriptorPoolSize poolSize(vk::DescriptorType::eInputAttachment, 1); - vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique(vk::DescriptorPoolCreateInfo(vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, 1, &poolSize)); + vk::DescriptorPoolSize poolSize( vk::DescriptorType::eInputAttachment, 1 ); + vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique( + vk::DescriptorPoolCreateInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, 1, &poolSize ) ); - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front()); + vk::UniqueDescriptorSet descriptorSet = 
std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); - vk::DescriptorImageInfo inputImageInfo(nullptr, inputAttachmentView.get(), vk::ImageLayout::eShaderReadOnlyOptimal); - vk::WriteDescriptorSet writeDescriptorSet(descriptorSet.get(), 0, 0, 1, vk::DescriptorType::eInputAttachment, &inputImageInfo); - device->updateDescriptorSets(vk::ArrayProxy(1, &writeDescriptorSet), nullptr); + vk::DescriptorImageInfo inputImageInfo( + nullptr, inputAttachmentView.get(), vk::ImageLayout::eShaderReadOnlyOptimal ); + vk::WriteDescriptorSet writeDescriptorSet( + descriptorSet.get(), 0, 0, 1, vk::DescriptorType::eInputAttachment, &inputImageInfo ); + device->updateDescriptorSets( vk::ArrayProxy( 1, &writeDescriptorSet ), nullptr ); - vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr), 0, {}, - vk::FrontFace::eClockwise, false, pipelineLayout, renderPass); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() ); + vk::UniquePipeline graphicsPipeline = + vk::su::createGraphicsPipeline( device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + 0, + {}, + vk::FrontFace::eClockwise, + false, + pipelineLayout, + renderPass ); - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); + vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); - vk::ResultValue result = device->acquireNextImage2KHR(vk::AcquireNextImageInfoKHR(swapChainData.swapChain.get(), UINT64_MAX, imageAcquiredSemaphore.get(), {}, 1)); - assert(result.result == vk::Result::eSuccess); + vk::ResultValue result = device->acquireNextImage2KHR( + vk::AcquireNextImageInfoKHR( swapChainData.swapChain.get(), UINT64_MAX, imageAcquiredSemaphore.get(), {}, 1 ) ); + assert( result.result == vk::Result::eSuccess ); uint32_t currentBuffer = result.value; vk::ClearValue clearValue; - clearValue.color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - commandBuffer->beginRenderPass(vk::RenderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 1, &clearValue), vk::SubpassContents::eInline); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get()); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr); + clearValue.color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + commandBuffer->beginRenderPass( vk::RenderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer].get(), + vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), + 1, + &clearValue ), + vk::SubpassContents::eInline ); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr ); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast(surfaceData.extent.width), static_cast(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), 
surfaceData.extent)); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast( surfaceData.extent.width ), + static_cast( surfaceData.extent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); - commandBuffer->draw(3, 1, 0, 0); + commandBuffer->draw( 3, 1, 0, 0 ); commandBuffer->endRenderPass(); commandBuffer->end(); /* VULKAN_KEY_END */ - vk::su::submitAndWait(device, graphicsQueue, commandBuffer); + vk::su::submitAndWait( device, graphicsQueue, commandBuffer ); - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/InstanceExtensionProperties/InstanceExtensionProperties.cpp b/samples/InstanceExtensionProperties/InstanceExtensionProperties.cpp index 50c3f53..cbc54f4 100644 --- a/samples/InstanceExtensionProperties/InstanceExtensionProperties.cpp +++ b/samples/InstanceExtensionProperties/InstanceExtensionProperties.cpp @@ -13,16 +13,18 @@ // limitations under the License. // // VulkanHpp Samples : InstanceExtensionProperties -// Get global extension properties to know what extension are available to enable at CreateInstance time. +// Get global extension properties to know what extension are available to enable at CreateInstance +// time. #include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "InstanceExtensionProperties"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "InstanceExtensionProperties"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { @@ -32,10 +34,14 @@ int main(int /*argc*/, char ** /*argv*/) // sort the extensions alphabetically - std::sort(extensionProperties.begin(), extensionProperties.end(), [](vk::ExtensionProperties const& a, vk::ExtensionProperties const& b) { return strcmp(a.extensionName, b.extensionName) < 0; }); + std::sort( extensionProperties.begin(), + extensionProperties.end(), + []( vk::ExtensionProperties const & a, vk::ExtensionProperties const & b ) { + return strcmp( a.extensionName, b.extensionName ) < 0; + } ); std::cout << "Instance Extensions:" << std::endl; - for (auto const& ep : extensionProperties) + for ( auto const & ep : extensionProperties ) { std::cout << ep.extensionName << ":" << std::endl; std::cout << "\tVersion: " << ep.specVersion << std::endl; @@ -44,20 +50,20 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/InstanceLayerExtensionProperties/InstanceLayerExtensionProperties.cpp b/samples/InstanceLayerExtensionProperties/InstanceLayerExtensionProperties.cpp index 5bde276..3e3bf6c 100644 --- a/samples/InstanceLayerExtensionProperties/InstanceLayerExtensionProperties.cpp +++ b/samples/InstanceLayerExtensionProperties/InstanceLayerExtensionProperties.cpp @@ -16,25 +16,26 @@ // Get list of global layers and their associated extensions, if any. #include "vulkan/vulkan.hpp" + #include #include #include -static char const* AppName = "InstanceLayerExtensionProperties"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "InstanceLayerExtensionProperties"; +static char const * EngineName = "Vulkan.hpp"; struct PropertyData { - PropertyData(vk::LayerProperties const& layerProperties_, std::vector const& extensionProperties_) - : layerProperties(layerProperties_) - , extensionProperties(extensionProperties_) + PropertyData( vk::LayerProperties const & layerProperties_, + std::vector const & extensionProperties_ ) + : layerProperties( layerProperties_ ), extensionProperties( extensionProperties_ ) {} - vk::LayerProperties layerProperties; - std::vector extensionProperties; + vk::LayerProperties layerProperties; + std::vector extensionProperties; }; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { @@ -43,35 +44,36 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_KEY_START */ std::vector propertyData; - propertyData.reserve(layerProperties.size()); + propertyData.reserve( layerProperties.size() ); - for (auto const& layerProperty : layerProperties) + for ( auto const & layerProperty : layerProperties ) { - std::vector extensionProperties = vk::enumerateInstanceExtensionProperties(std::string(layerProperty.layerName)); - propertyData.push_back(PropertyData(layerProperty, extensionProperties)); + std::vector extensionProperties = + vk::enumerateInstanceExtensionProperties( std::string( layerProperty.layerName ) ); + propertyData.push_back( PropertyData( layerProperty, extensionProperties ) ); } /* VULKAN_KEY_END */ std::cout << "Instance Layers:" << std::endl; - if (propertyData.empty()) + if ( propertyData.empty() ) { std::cout << "Set the environment variable VK_LAYER_PATH to point to the location of your layers" << std::endl; } else { - for (auto const& pd : propertyData) + for ( auto const & pd : propertyData ) { std::cout << pd.layerProperties.layerName << std::endl; - if (pd.extensionProperties.empty()) + if ( pd.extensionProperties.empty() ) { std::cout << "Layer Extension: None"; } else { - for (auto it = pd.extensionProperties.begin(); it != pd.extensionProperties.end(); ++it) + for ( auto it = pd.extensionProperties.begin(); it != pd.extensionProperties.end(); ++it ) { - if (it != pd.extensionProperties.begin()) + if ( it != pd.extensionProperties.begin() ) { std::cout << ", "; } @@ -83,20 +85,20 @@ int main(int /*argc*/, char ** /*argv*/) } std::cout << std::endl; } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/InstanceLayerProperties/InstanceLayerProperties.cpp b/samples/InstanceLayerProperties/InstanceLayerProperties.cpp index 1b061fb..faa7ab5 100644 --- a/samples/InstanceLayerProperties/InstanceLayerProperties.cpp +++ b/samples/InstanceLayerProperties/InstanceLayerProperties.cpp @@ -15,16 +15,16 @@ // VulkanHpp Samples : InstanceLayerProperties // Get global layer properties to know what layers are available to enable at CreateInstance time. - #include "vulkan/vulkan.hpp" + #include #include #include -static char const* AppName = "InstanceLayerProperties"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "InstanceLayerProperties"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { @@ -33,35 +33,36 @@ int main(int /*argc*/, char ** /*argv*/) std::vector layerProperties = vk::enumerateInstanceLayerProperties(); std::cout << "Instance Layers:" << std::endl; - if (layerProperties.empty()) + if ( layerProperties.empty() ) { std::cout << "Set the environment variable VK_LAYER_PATH to point to the location of your layers" << std::endl; } - for (auto const& lp : layerProperties) + for ( auto const & lp : layerProperties ) { std::cout << lp.layerName << ":" << std::endl; std::cout << "\tVersion: " << lp.implementationVersion << std::endl; - std::cout << "\tAPI Version: (" << (lp.specVersion >> 22) << "." << ((lp.specVersion >> 12) & 0x03FF) << "." << (lp.specVersion & 0xFFF) << ")" << std::endl; + std::cout << "\tAPI Version: (" << ( lp.specVersion >> 22 ) << "." << ( ( lp.specVersion >> 12 ) & 0x03FF ) << "." + << ( lp.specVersion & 0xFFF ) << ")" << std::endl; std::cout << "\tDescription: " << lp.description << std::endl; std::cout << std::endl; } /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/InstanceVersion/InstanceVersion.cpp b/samples/InstanceVersion/InstanceVersion.cpp index 5bb9b9a..7d9fcd1 100644 --- a/samples/InstanceVersion/InstanceVersion.cpp +++ b/samples/InstanceVersion/InstanceVersion.cpp @@ -16,42 +16,44 @@ // Get the version of instance-level functionality supported by the implementation. #include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "InstanceVersion"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "InstanceVersion"; +static char const * EngineName = "Vulkan.hpp"; -std::string decodeAPIVersion(uint32_t apiVersion) +std::string decodeAPIVersion( uint32_t apiVersion ) { - return std::to_string(VK_VERSION_MAJOR(apiVersion)) + "." + std::to_string(VK_VERSION_MINOR(apiVersion)) + "." + std::to_string(VK_VERSION_PATCH(apiVersion)); + return std::to_string( VK_VERSION_MAJOR( apiVersion ) ) + "." + std::to_string( VK_VERSION_MINOR( apiVersion ) ) + + "." 
+ std::to_string( VK_VERSION_PATCH( apiVersion ) ); } -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { /* VULKAN_KEY_START */ uint32_t apiVersion = vk::enumerateInstanceVersion(); - std::cout << "APIVersion = " << decodeAPIVersion(apiVersion); + std::cout << "APIVersion = " << decodeAPIVersion( apiVersion ); /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/MultipleSets/MultipleSets.cpp b/samples/MultipleSets/MultipleSets.cpp index 15ce1d9..ece642f 100644 --- a/samples/MultipleSets/MultipleSets.cpp +++ b/samples/MultipleSets/MultipleSets.cpp @@ -19,13 +19,14 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "MultipleSets"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "MultipleSets"; +static char const * EngineName = "Vulkan.hpp"; const std::string vertexShaderText = R"( #version 400 @@ -77,145 +78,200 @@ void main() } )"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = 
device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::su::TextureData textureData(physicalDevice, device); + vk::su::TextureData textureData( physicalDevice, device ); - commandBuffer->begin(vk::CommandBufferBeginInfo()); - textureData.setImage(device, commandBuffer, vk::su::MonochromeImageGenerator({ 118, 185, 0 })); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + textureData.setImage( device, commandBuffer, vk::su::MonochromeImageGenerator( { 118, 185, 0 } ) ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(surfaceData.extent)); + vk::su::BufferData uniformBufferData( + physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( + device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( surfaceData.extent ) ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + depthBufferData.format ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent ); - vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(texturedCubeData), 
vk::BufferUsageFlagBits::eVertexBuffer); - vk::su::copyToDevice(device, vertexBufferData.deviceMemory, texturedCubeData, sizeof(texturedCubeData) / sizeof(texturedCubeData[0])); + vk::su::BufferData vertexBufferData( + physicalDevice, device, sizeof( texturedCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ); + vk::su::copyToDevice( device, + vertexBufferData.deviceMemory, + texturedCubeData, + sizeof( texturedCubeData ) / sizeof( texturedCubeData[0] ) ); /* VULKAN_KEY_START */ // Create first layout to contain uniform buffer data - vk::DescriptorSetLayoutBinding uniformBinding(0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex); - vk::UniqueDescriptorSetLayout uniformLayout = device->createDescriptorSetLayoutUnique(vk::DescriptorSetLayoutCreateInfo(vk::DescriptorSetLayoutCreateFlags(), 1, &uniformBinding)); + vk::DescriptorSetLayoutBinding uniformBinding( + 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex ); + vk::UniqueDescriptorSetLayout uniformLayout = device->createDescriptorSetLayoutUnique( + vk::DescriptorSetLayoutCreateInfo( vk::DescriptorSetLayoutCreateFlags(), 1, &uniformBinding ) ); // Create second layout containing combined sampler/image data - vk::DescriptorSetLayoutBinding sampler2DBinding(0, vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eVertex); - vk::UniqueDescriptorSetLayout samplerLayout = device->createDescriptorSetLayoutUnique(vk::DescriptorSetLayoutCreateInfo(vk::DescriptorSetLayoutCreateFlags(), 1, &sampler2DBinding)); + vk::DescriptorSetLayoutBinding sampler2DBinding( + 0, vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eVertex ); + vk::UniqueDescriptorSetLayout samplerLayout = device->createDescriptorSetLayoutUnique( + vk::DescriptorSetLayoutCreateInfo( vk::DescriptorSetLayoutCreateFlags(), 1, &sampler2DBinding ) ); // Create pipeline layout with multiple descriptor sets std::array descriptorSetLayouts = { uniformLayout.get(), samplerLayout.get() }; - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 2, descriptorSetLayouts.data())); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 2, descriptorSetLayouts.data() ) ); // Create a single pool to contain data for our two descriptor sets - vk::DescriptorPoolSize poolSizes[2] = - { - vk::DescriptorPoolSize(vk::DescriptorType::eUniformBuffer, 1), - vk::DescriptorPoolSize(vk::DescriptorType::eCombinedImageSampler, 1) - }; - vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique(vk::DescriptorPoolCreateInfo(vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 2, 2, poolSizes)); + vk::DescriptorPoolSize poolSizes[2] = { vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 1 ), + vk::DescriptorPoolSize( vk::DescriptorType::eCombinedImageSampler, 1 ) }; + vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique( + vk::DescriptorPoolCreateInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 2, 2, poolSizes ) ); // Populate descriptor sets - std::vector descriptorSets = device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(descriptorPool.get(), 2, descriptorSetLayouts.data())); + std::vector descriptorSets = device->allocateDescriptorSetsUnique( + vk::DescriptorSetAllocateInfo( descriptorPool.get(), 2, descriptorSetLayouts.data() ) ); // Populate with info about our uniform 
buffer - vk::DescriptorBufferInfo uniformBufferInfo(uniformBufferData.buffer.get(), 0, sizeof(glm::mat4x4)); - vk::DescriptorImageInfo textureImageInfo(textureData.textureSampler.get(), textureData.imageData->imageView.get(), vk::ImageLayout::eShaderReadOnlyOptimal); - std::array writeDescriptorSets = - { - vk::WriteDescriptorSet(descriptorSets[0].get(), 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &uniformBufferInfo), - vk::WriteDescriptorSet(descriptorSets[1].get(), 0, 0, 1, vk::DescriptorType::eCombinedImageSampler, &textureImageInfo) + vk::DescriptorBufferInfo uniformBufferInfo( uniformBufferData.buffer.get(), 0, sizeof( glm::mat4x4 ) ); + vk::DescriptorImageInfo textureImageInfo( textureData.textureSampler.get(), + textureData.imageData->imageView.get(), + vk::ImageLayout::eShaderReadOnlyOptimal ); + std::array writeDescriptorSets = { + vk::WriteDescriptorSet( + descriptorSets[0].get(), 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &uniformBufferInfo ), + vk::WriteDescriptorSet( + descriptorSets[1].get(), 0, 0, 1, vk::DescriptorType::eCombinedImageSampler, &textureImageInfo ) }; - device->updateDescriptorSets(writeDescriptorSets, nullptr); + device->updateDescriptorSets( writeDescriptorSets, nullptr ); /* VULKAN_KEY_END */ - vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr), - sizeof(texturedCubeData[0]), { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, - vk::FrontFace::eClockwise, true, pipelineLayout, renderPass); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() ); + vk::UniquePipeline graphicsPipeline = + vk::su::createGraphicsPipeline( device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + sizeof( texturedCubeData[0] ), + { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, + vk::FrontFace::eClockwise, + true, + pipelineLayout, + renderPass ); // Get the index of the next available swapchain image: - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); + vk::ResultValue currentBuffer = device->acquireNextImageKHR( + swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); vk::ClearValue clearValues[2]; - clearValues[0].color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); - vk::RenderPassBeginInfo renderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 2, clearValues); - commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eInline); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, 
graphicsPipeline.get()); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, { descriptorSets[0].get(), descriptorSets[1].get() }, nullptr); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); + vk::RenderPassBeginInfo renderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), + 2, + clearValues ); + commandBuffer->beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline ); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); + commandBuffer->bindDescriptorSets( vk::PipelineBindPoint::eGraphics, + pipelineLayout.get(), + 0, + { descriptorSets[0].get(), descriptorSets[1].get() }, + nullptr ); - commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0}); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast(surfaceData.extent.width), static_cast(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent)); + commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } ); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast( surfaceData.extent.width ), + static_cast( surfaceData.extent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); - commandBuffer->draw(12 * 3, 1, 0, 0); + commandBuffer->draw( 12 * 3, 1, 0, 0 ); commandBuffer->endRenderPass(); commandBuffer->end(); - vk::UniqueFence drawFence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence drawFence = device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - vk::SubmitInfo submitInfo(1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, drawFence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, drawFence.get() ); - while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == device->waitForFences( drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); device->waitIdle(); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
)
   {
     std::cout << "unknown error\n";
-    exit(-1);
+    exit( -1 );
   }
   return 0;
 }
diff --git a/samples/OcclusionQuery/OcclusionQuery.cpp b/samples/OcclusionQuery/OcclusionQuery.cpp
index b94e161..18c669b 100644
--- a/samples/OcclusionQuery/OcclusionQuery.cpp
+++ b/samples/OcclusionQuery/OcclusionQuery.cpp
@@ -19,161 +19,238 @@
 #include "../utils/math.hpp"
 #include "../utils/shaders.hpp"
 #include "../utils/utils.hpp"
-#include "vulkan/vulkan.hpp"
 #include "SPIRV/GlslangToSpv.h"
+#include "vulkan/vulkan.hpp"
+
+#include <iostream>
 #include <thread>
 
-static char const* AppName = "OcclusionQuery";
-static char const* EngineName = "Vulkan.hpp";
+static char const * AppName = "OcclusionQuery";
+static char const * EngineName = "Vulkan.hpp";
 
-int main(int /*argc*/, char ** /*argv*/)
+int main( int /*argc*/, char ** /*argv*/ )
 {
   try
   {
-    vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions());
-#if !defined(NDEBUG)
-    vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance);
+    vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() );
+#if !defined( NDEBUG )
+    vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance );
 #endif
     vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front();
 
-    vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500));
+    vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) );
 
-    std::pair<uint32_t, uint32_t> graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface);
-    vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions());
+    std::pair<uint32_t, uint32_t> graphicsAndPresentQueueFamilyIndex =
+      vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface );
+    vk::UniqueDevice device =
+      vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() );
 
-    vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first);
-    vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front());
+    vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first );
+    vk::UniqueCommandBuffer commandBuffer = std::move( device
+                                                         ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo(
+                                                           commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) )
+                                                         .front() );
 
-    vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0);
-    vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0);
+    vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 );
+    vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 );
 
-    vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc,
-                                        vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second);
+    vk::su::SwapChainData swapChainData( physicalDevice,
+                                         device,
+                                         *surfaceData.surface,
+                                         surfaceData.extent,
+                                         vk::ImageUsageFlagBits::eColorAttachment |
+                                           vk::ImageUsageFlagBits::eTransferSrc,
+                                         vk::UniqueSwapchainKHR(),
+                                         graphicsAndPresentQueueFamilyIndex.first,
+                                         graphicsAndPresentQueueFamilyIndex.second );
 
-    vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent);
+    vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent );
 
-    vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer);
-    vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(surfaceData.extent));
+    vk::su::BufferData uniformBufferData(
+      physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer );
+    vk::su::copyToDevice(
+      device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( surfaceData.extent ) );
 
-    vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, { {vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex} });
-    vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get()));
+    vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(
+      device, { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex } } );
+    vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(
+      vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) );
 
-    vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format);
+    vk::UniqueRenderPass renderPass = vk::su::createRenderPass(
+      device,
+      vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format,
+      depthBufferData.format );
 
     glslang::InitializeProcess();
-    vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PC_C);
-    vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C);
+    vk::UniqueShaderModule vertexShaderModule =
+      vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PC_C );
+    vk::UniqueShaderModule fragmentShaderModule =
+      vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C );
     glslang::FinalizeProcess();
 
-    std::vector<vk::UniqueFramebuffer> framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent);
+    std::vector<vk::UniqueFramebuffer> framebuffers = vk::su::createFramebuffers(
+      device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent );
 
-    vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(coloredCubeData), vk::BufferUsageFlagBits::eVertexBuffer);
-    vk::su::copyToDevice(device, vertexBufferData.deviceMemory, coloredCubeData, sizeof(coloredCubeData) / sizeof(coloredCubeData[0]));
+    vk::su::BufferData vertexBufferData(
+      physicalDevice, device, sizeof( coloredCubeData ), vk::BufferUsageFlagBits::eVertexBuffer );
+    vk::su::copyToDevice( device,
+                          vertexBufferData.deviceMemory,
+                          coloredCubeData,
+                          sizeof( coloredCubeData ) / sizeof( coloredCubeData[0] ) );
 
-    vk::UniqueDescriptorPool descriptorPool =
vk::su::createDescriptorPool(device, { {vk::DescriptorType::eUniformBuffer, 1} }); - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front()); + vk::UniqueDescriptorPool descriptorPool = + vk::su::createDescriptorPool( device, { { vk::DescriptorType::eUniformBuffer, 1 } } ); + vk::UniqueDescriptorSet descriptorSet = std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); - vk::su::updateDescriptorSets(device, descriptorSet, {{vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView()}}, {}); + vk::su::updateDescriptorSets( + device, + descriptorSet, + { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView() } }, + {} ); - vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr), - sizeof(coloredCubeData[0]), { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32B32A32Sfloat, 16 } }, - vk::FrontFace::eClockwise, true, pipelineLayout, renderPass); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() ); + vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline( + device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + sizeof( coloredCubeData[0] ), + { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32B32A32Sfloat, 16 } }, + vk::FrontFace::eClockwise, + true, + pipelineLayout, + renderPass ); /* VULKAN_KEY_START */ - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo(vk::SemaphoreCreateFlags())); + vk::UniqueSemaphore imageAcquiredSemaphore = + device->createSemaphoreUnique( vk::SemaphoreCreateInfo( vk::SemaphoreCreateFlags() ) ); // Get the index of the next available swapchain image: - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), UINT64_MAX, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::ResultValue currentBuffer = + device->acquireNextImageKHR( swapChainData.swapChain.get(), UINT64_MAX, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); /* Allocate a uniform buffer that will take query results. 
*/ - vk::UniqueBuffer queryResultBuffer = device->createBufferUnique(vk::BufferCreateInfo(vk::BufferCreateFlags(), 4 * sizeof(uint64_t), vk::BufferUsageFlagBits::eUniformBuffer | vk::BufferUsageFlagBits::eTransferDst)); + vk::UniqueBuffer queryResultBuffer = device->createBufferUnique( + vk::BufferCreateInfo( vk::BufferCreateFlags(), + 4 * sizeof( uint64_t ), + vk::BufferUsageFlagBits::eUniformBuffer | vk::BufferUsageFlagBits::eTransferDst ) ); - vk::MemoryRequirements memoryRequirements = device->getBufferMemoryRequirements(queryResultBuffer.get()); - uint32_t memoryTypeIndex = vk::su::findMemoryType(physicalDevice.getMemoryProperties(), memoryRequirements.memoryTypeBits, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); - vk::UniqueDeviceMemory queryResultMemory = device->allocateMemoryUnique(vk::MemoryAllocateInfo(memoryRequirements.size, memoryTypeIndex)); + vk::MemoryRequirements memoryRequirements = device->getBufferMemoryRequirements( queryResultBuffer.get() ); + uint32_t memoryTypeIndex = + vk::su::findMemoryType( physicalDevice.getMemoryProperties(), + memoryRequirements.memoryTypeBits, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent ); + vk::UniqueDeviceMemory queryResultMemory = + device->allocateMemoryUnique( vk::MemoryAllocateInfo( memoryRequirements.size, memoryTypeIndex ) ); - device->bindBufferMemory(queryResultBuffer.get(), queryResultMemory.get(), 0); + device->bindBufferMemory( queryResultBuffer.get(), queryResultMemory.get(), 0 ); - vk::UniqueQueryPool queryPool = device->createQueryPoolUnique(vk::QueryPoolCreateInfo(vk::QueryPoolCreateFlags(), vk::QueryType::eOcclusion, 2, vk::QueryPipelineStatisticFlags())); + vk::UniqueQueryPool queryPool = device->createQueryPoolUnique( vk::QueryPoolCreateInfo( + vk::QueryPoolCreateFlags(), vk::QueryType::eOcclusion, 2, vk::QueryPipelineStatisticFlags() ) ); - commandBuffer->begin(vk::CommandBufferBeginInfo(vk::CommandBufferUsageFlags())); - commandBuffer->resetQueryPool(queryPool.get(), 0, 2); + commandBuffer->begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlags() ) ); + commandBuffer->resetQueryPool( queryPool.get(), 0, 2 ); vk::ClearValue clearValues[2]; - clearValues[0].color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); - commandBuffer->beginRenderPass(vk::RenderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(), surfaceData.extent), 2, clearValues), vk::SubpassContents::eInline); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); + commandBuffer->beginRenderPass( vk::RenderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D(), surfaceData.extent ), + 2, + clearValues ), + vk::SubpassContents::eInline ); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get()); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), {}); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), {} ); - commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0}); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, 
static_cast(surfaceData.extent.width), static_cast(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent)); + commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } ); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast( surfaceData.extent.width ), + static_cast( surfaceData.extent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); - commandBuffer->beginQuery(queryPool.get(), 0, vk::QueryControlFlags()); - commandBuffer->endQuery(queryPool.get(), 0); + commandBuffer->beginQuery( queryPool.get(), 0, vk::QueryControlFlags() ); + commandBuffer->endQuery( queryPool.get(), 0 ); - commandBuffer->beginQuery(queryPool.get(), 1, vk::QueryControlFlags()); - commandBuffer->draw(12 * 3, 1, 0, 0); + commandBuffer->beginQuery( queryPool.get(), 1, vk::QueryControlFlags() ); + commandBuffer->draw( 12 * 3, 1, 0, 0 ); commandBuffer->endRenderPass(); - commandBuffer->endQuery(queryPool.get(), 1); + commandBuffer->endQuery( queryPool.get(), 1 ); - commandBuffer->copyQueryPoolResults(queryPool.get(), 0, 2, queryResultBuffer.get(), 0, sizeof(uint64_t), vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait); + commandBuffer->copyQueryPoolResults( queryPool.get(), + 0, + 2, + queryResultBuffer.get(), + 0, + sizeof( uint64_t ), + vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait ); commandBuffer->end(); - vk::UniqueFence drawFence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence drawFence = device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - vk::SubmitInfo submitInfo(1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, drawFence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, drawFence.get() ); graphicsQueue.waitIdle(); uint64_t samplesPassed[2]; - device->getQueryPoolResults(queryPool.get(), 0, 2, vk::ArrayProxy(4, samplesPassed), sizeof(uint64_t), vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait); + device->getQueryPoolResults( queryPool.get(), + 0, + 2, + vk::ArrayProxy( 4, samplesPassed ), + sizeof( uint64_t ), + vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait ); std::cout << "vkGetQueryPoolResults data\n"; std::cout << "samples_passed[0] = " << samplesPassed[0] << "\n"; std::cout << "samples_passed[1] = " << samplesPassed[1] << "\n"; /* Read back query result from buffer */ - uint64_t *samplesPassedPtr = static_cast(device->mapMemory(queryResultMemory.get(), 0, memoryRequirements.size, vk::MemoryMapFlags())); + uint64_t * samplesPassedPtr = static_cast( + device->mapMemory( queryResultMemory.get(), 0, memoryRequirements.size, vk::MemoryMapFlags() ) ); std::cout << "vkCmdCopyQueryPoolResults data\n"; std::cout << "samples_passed[0] = " << samplesPassedPtr[0] << "\n"; std::cout << "samples_passed[1] = " << samplesPassedPtr[1] << "\n"; - device->unmapMemory(queryResultMemory.get()); + device->unmapMemory( queryResultMemory.get() ); - while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == device->waitForFences( 
drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/PhysicalDeviceExtensions/PhysicalDeviceExtensions.cpp b/samples/PhysicalDeviceExtensions/PhysicalDeviceExtensions.cpp index d9f1702..72496b9 100644 --- a/samples/PhysicalDeviceExtensions/PhysicalDeviceExtensions.cpp +++ b/samples/PhysicalDeviceExtensions/PhysicalDeviceExtensions.cpp @@ -17,18 +17,19 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "DeviceExtensionProperties"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "DeviceExtensionProperties"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif // enumerate the physicalDevices @@ -36,14 +37,19 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_KEY_START */ - for (size_t i=0 ; i extensionProperties = physicalDevices[i].enumerateDeviceExtensionProperties(); + std::vector extensionProperties = + physicalDevices[i].enumerateDeviceExtensionProperties(); // sort the extensions alphabetically - std::sort(extensionProperties.begin(), extensionProperties.end(), [](vk::ExtensionProperties const& a, vk::ExtensionProperties const& b) { return strcmp(a.extensionName, b.extensionName) < 0; }); - for (auto const& ep : extensionProperties) + std::sort( extensionProperties.begin(), + extensionProperties.end(), + []( vk::ExtensionProperties const & a, vk::ExtensionProperties const & b ) { + return strcmp( a.extensionName, b.extensionName ) < 0; + } ); + for ( auto const & ep : extensionProperties ) { std::cout << "\t" << ep.extensionName << ":" << std::endl; std::cout << "\t\tVersion: " << ep.specVersion << std::endl; @@ -53,20 +59,20 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/PhysicalDeviceFeatures/PhysicalDeviceFeatures.cpp b/samples/PhysicalDeviceFeatures/PhysicalDeviceFeatures.cpp index 5d0d984..c94e664 100644 --- a/samples/PhysicalDeviceFeatures/PhysicalDeviceFeatures.cpp +++ b/samples/PhysicalDeviceFeatures/PhysicalDeviceFeatures.cpp @@ -17,18 +17,19 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "PhysicalDeviceFeatures"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "PhysicalDeviceFeatures"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif // enumerate the physicalDevices @@ -37,548 +38,782 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_KEY_START */ std::cout << std::boolalpha; - for (size_t i=0 ; i extensionProperties = physicalDevices[i].enumerateDeviceExtensionProperties(); + std::vector extensionProperties = + physicalDevices[i].enumerateDeviceExtensionProperties(); std::cout << "PhysicalDevice " << i << "\n"; - auto features2 = physicalDevices[i].getFeatures2(); - vk::PhysicalDeviceFeatures const& features = features2.get().features; + auto features2 = physicalDevices[i] + .getFeatures2(); + vk::PhysicalDeviceFeatures const & features = features2.get().features; std::cout << "\tFeatures:\n"; - std::cout << "\t\talphaToOne : " << static_cast(features.alphaToOne) << "\n"; - std::cout << "\t\tdepthBiasClamp : " << static_cast(features.depthBiasClamp) << "\n"; - std::cout << "\t\tdepthBounds : " << static_cast(features.depthBounds) << "\n"; - std::cout << "\t\tdepthClamp : " << static_cast(features.depthClamp) << "\n"; - std::cout << "\t\tdrawIndirectFirstInstance : " << static_cast(features.drawIndirectFirstInstance) << "\n"; - std::cout << "\t\tdualSrcBlend : " << static_cast(features.dualSrcBlend) << "\n"; - std::cout << "\t\tfillModeNonSolid : " << static_cast(features.fillModeNonSolid) << "\n"; - std::cout << "\t\tfragmentStoresAndAtomics : " << static_cast(features.fragmentStoresAndAtomics) << "\n"; - std::cout << "\t\tfullDrawIndexUint32 : " << static_cast(features.fullDrawIndexUint32) << "\n"; - std::cout << "\t\tgeometryShader : " << static_cast(features.geometryShader) << "\n"; - std::cout << "\t\timageCubeArray : " << static_cast(features.imageCubeArray) << "\n"; - std::cout << "\t\tindependentBlend : " << static_cast(features.independentBlend) << "\n"; - std::cout << "\t\tinheritedQueries : " << static_cast(features.inheritedQueries) << "\n"; - std::cout << "\t\tlargePoints : " << static_cast(features.largePoints) << "\n"; - std::cout << "\t\tlogicOp : " << static_cast(features.logicOp) << "\n"; - std::cout << "\t\tmultiDrawIndirect : " << static_cast(features.multiDrawIndirect) << "\n"; - std::cout << "\t\tmultiViewport : " << static_cast(features.multiViewport) << "\n"; - std::cout << "\t\tocclusionQueryPrecise : " << static_cast(features.occlusionQueryPrecise) << "\n"; - std::cout << "\t\tpipelineStatisticsQuery : " << 
static_cast(features.pipelineStatisticsQuery) << "\n"; - std::cout << "\t\trobustBufferAccess : " << static_cast(features.robustBufferAccess) << "\n"; - std::cout << "\t\tsamplerAnisotropy : " << static_cast(features.samplerAnisotropy) << "\n"; - std::cout << "\t\tsampleRateShading : " << static_cast(features.sampleRateShading) << "\n"; - std::cout << "\t\tshaderClipDistance : " << static_cast(features.shaderClipDistance) << "\n"; - std::cout << "\t\tshaderCullDistance : " << static_cast(features.shaderCullDistance) << "\n"; - std::cout << "\t\tshaderFloat64 : " << static_cast(features.shaderFloat64) << "\n"; - std::cout << "\t\tshaderImageGatherExtended : " << static_cast(features.shaderImageGatherExtended) << "\n"; - std::cout << "\t\tshaderInt16 : " << static_cast(features.shaderInt16) << "\n"; - std::cout << "\t\tshaderInt64 : " << static_cast(features.shaderInt64) << "\n"; - std::cout << "\t\tshaderResourceMinLod : " << static_cast(features.shaderResourceMinLod) << "\n"; - std::cout << "\t\tshaderResourceResidency : " << static_cast(features.shaderResourceResidency) << "\n"; - std::cout << "\t\tshaderSampledImageArrayDynamicIndexing : " << static_cast(features.shaderSampledImageArrayDynamicIndexing) << "\n"; - std::cout << "\t\tshaderStorageBufferArrayDynamicIndexing : " << static_cast(features.shaderStorageBufferArrayDynamicIndexing) << "\n"; - std::cout << "\t\tshaderStorageImageArrayDynamicIndexing : " << static_cast(features.shaderStorageImageArrayDynamicIndexing) << "\n"; - std::cout << "\t\tshaderStorageImageExtendedFormats : " << static_cast(features.shaderStorageImageExtendedFormats) << "\n"; - std::cout << "\t\tshaderStorageImageMultisample : " << static_cast(features.shaderStorageImageMultisample) << "\n"; - std::cout << "\t\tshaderStorageImageReadWithoutFormat : " << static_cast(features.shaderStorageImageReadWithoutFormat) << "\n"; - std::cout << "\t\tshaderStorageImageWriteWithoutFormat : " << static_cast(features.shaderStorageImageWriteWithoutFormat) << "\n"; - std::cout << "\t\tshaderTessellationAndGeometryPointSize : " << static_cast(features.shaderTessellationAndGeometryPointSize) << "\n"; - std::cout << "\t\tshaderUniformBufferArrayDynamicIndexing : " << static_cast(features.shaderUniformBufferArrayDynamicIndexing) << "\n"; - std::cout << "\t\tsparseBinding : " << static_cast(features.sparseBinding) << "\n"; - std::cout << "\t\tsparseResidency16Samples : " << static_cast(features.sparseResidency16Samples) << "\n"; - std::cout << "\t\tsparseResidency2Samples : " << static_cast(features.sparseResidency2Samples) << "\n"; - std::cout << "\t\tsparseResidency4Samples : " << static_cast(features.sparseResidency4Samples) << "\n"; - std::cout << "\t\tsparseResidency8Samples : " << static_cast(features.sparseResidency8Samples) << "\n"; - std::cout << "\t\tsparseResidencyAliased : " << static_cast(features.sparseResidencyAliased) << "\n"; - std::cout << "\t\tsparseResidencyBuffer : " << static_cast(features.sparseResidencyBuffer) << "\n"; - std::cout << "\t\tsparseResidencyImage2D : " << static_cast(features.sparseResidencyImage2D) << "\n"; - std::cout << "\t\tsparseResidencyImage3D : " << static_cast(features.sparseResidencyImage3D) << "\n"; - std::cout << "\t\ttessellationShader : " << static_cast(features.tessellationShader) << "\n"; - std::cout << "\t\ttextureCompressionASTC_LDR : " << static_cast(features.textureCompressionASTC_LDR) << "\n"; - std::cout << "\t\ttextureCompressionBC : " << static_cast(features.textureCompressionBC) << "\n"; - std::cout << 
"\t\ttextureCompressionETC2 : " << static_cast(features.textureCompressionETC2) << "\n"; - std::cout << "\t\tvariableMultisampleRate : " << static_cast(features.variableMultisampleRate) << "\n"; - std::cout << "\t\tvertexPipelineStoresAndAtomics : " << static_cast(features.vertexPipelineStoresAndAtomics) << "\n"; - std::cout << "\t\twideLines : " << static_cast(features.wideLines) << "\n"; + std::cout << "\t\talphaToOne : " << static_cast( features.alphaToOne ) << "\n"; + std::cout << "\t\tdepthBiasClamp : " << static_cast( features.depthBiasClamp ) + << "\n"; + std::cout << "\t\tdepthBounds : " << static_cast( features.depthBounds ) + << "\n"; + std::cout << "\t\tdepthClamp : " << static_cast( features.depthClamp ) << "\n"; + std::cout << "\t\tdrawIndirectFirstInstance : " + << static_cast( features.drawIndirectFirstInstance ) << "\n"; + std::cout << "\t\tdualSrcBlend : " << static_cast( features.dualSrcBlend ) + << "\n"; + std::cout << "\t\tfillModeNonSolid : " << static_cast( features.fillModeNonSolid ) + << "\n"; + std::cout << "\t\tfragmentStoresAndAtomics : " + << static_cast( features.fragmentStoresAndAtomics ) << "\n"; + std::cout << "\t\tfullDrawIndexUint32 : " << static_cast( features.fullDrawIndexUint32 ) + << "\n"; + std::cout << "\t\tgeometryShader : " << static_cast( features.geometryShader ) + << "\n"; + std::cout << "\t\timageCubeArray : " << static_cast( features.imageCubeArray ) + << "\n"; + std::cout << "\t\tindependentBlend : " << static_cast( features.independentBlend ) + << "\n"; + std::cout << "\t\tinheritedQueries : " << static_cast( features.inheritedQueries ) + << "\n"; + std::cout << "\t\tlargePoints : " << static_cast( features.largePoints ) + << "\n"; + std::cout << "\t\tlogicOp : " << static_cast( features.logicOp ) << "\n"; + std::cout << "\t\tmultiDrawIndirect : " << static_cast( features.multiDrawIndirect ) + << "\n"; + std::cout << "\t\tmultiViewport : " << static_cast( features.multiViewport ) + << "\n"; + std::cout << "\t\tocclusionQueryPrecise : " + << static_cast( features.occlusionQueryPrecise ) << "\n"; + std::cout << "\t\tpipelineStatisticsQuery : " + << static_cast( features.pipelineStatisticsQuery ) << "\n"; + std::cout << "\t\trobustBufferAccess : " << static_cast( features.robustBufferAccess ) + << "\n"; + std::cout << "\t\tsamplerAnisotropy : " << static_cast( features.samplerAnisotropy ) + << "\n"; + std::cout << "\t\tsampleRateShading : " << static_cast( features.sampleRateShading ) + << "\n"; + std::cout << "\t\tshaderClipDistance : " << static_cast( features.shaderClipDistance ) + << "\n"; + std::cout << "\t\tshaderCullDistance : " << static_cast( features.shaderCullDistance ) + << "\n"; + std::cout << "\t\tshaderFloat64 : " << static_cast( features.shaderFloat64 ) + << "\n"; + std::cout << "\t\tshaderImageGatherExtended : " + << static_cast( features.shaderImageGatherExtended ) << "\n"; + std::cout << "\t\tshaderInt16 : " << static_cast( features.shaderInt16 ) + << "\n"; + std::cout << "\t\tshaderInt64 : " << static_cast( features.shaderInt64 ) + << "\n"; + std::cout << "\t\tshaderResourceMinLod : " + << static_cast( features.shaderResourceMinLod ) << "\n"; + std::cout << "\t\tshaderResourceResidency : " + << static_cast( features.shaderResourceResidency ) << "\n"; + std::cout << "\t\tshaderSampledImageArrayDynamicIndexing : " + << static_cast( features.shaderSampledImageArrayDynamicIndexing ) << "\n"; + std::cout << "\t\tshaderStorageBufferArrayDynamicIndexing : " + << static_cast( features.shaderStorageBufferArrayDynamicIndexing ) << 
"\n"; + std::cout << "\t\tshaderStorageImageArrayDynamicIndexing : " + << static_cast( features.shaderStorageImageArrayDynamicIndexing ) << "\n"; + std::cout << "\t\tshaderStorageImageExtendedFormats : " + << static_cast( features.shaderStorageImageExtendedFormats ) << "\n"; + std::cout << "\t\tshaderStorageImageMultisample : " + << static_cast( features.shaderStorageImageMultisample ) << "\n"; + std::cout << "\t\tshaderStorageImageReadWithoutFormat : " + << static_cast( features.shaderStorageImageReadWithoutFormat ) << "\n"; + std::cout << "\t\tshaderStorageImageWriteWithoutFormat : " + << static_cast( features.shaderStorageImageWriteWithoutFormat ) << "\n"; + std::cout << "\t\tshaderTessellationAndGeometryPointSize : " + << static_cast( features.shaderTessellationAndGeometryPointSize ) << "\n"; + std::cout << "\t\tshaderUniformBufferArrayDynamicIndexing : " + << static_cast( features.shaderUniformBufferArrayDynamicIndexing ) << "\n"; + std::cout << "\t\tsparseBinding : " << static_cast( features.sparseBinding ) + << "\n"; + std::cout << "\t\tsparseResidency16Samples : " + << static_cast( features.sparseResidency16Samples ) << "\n"; + std::cout << "\t\tsparseResidency2Samples : " + << static_cast( features.sparseResidency2Samples ) << "\n"; + std::cout << "\t\tsparseResidency4Samples : " + << static_cast( features.sparseResidency4Samples ) << "\n"; + std::cout << "\t\tsparseResidency8Samples : " + << static_cast( features.sparseResidency8Samples ) << "\n"; + std::cout << "\t\tsparseResidencyAliased : " + << static_cast( features.sparseResidencyAliased ) << "\n"; + std::cout << "\t\tsparseResidencyBuffer : " + << static_cast( features.sparseResidencyBuffer ) << "\n"; + std::cout << "\t\tsparseResidencyImage2D : " + << static_cast( features.sparseResidencyImage2D ) << "\n"; + std::cout << "\t\tsparseResidencyImage3D : " + << static_cast( features.sparseResidencyImage3D ) << "\n"; + std::cout << "\t\ttessellationShader : " << static_cast( features.tessellationShader ) + << "\n"; + std::cout << "\t\ttextureCompressionASTC_LDR : " + << static_cast( features.textureCompressionASTC_LDR ) << "\n"; + std::cout << "\t\ttextureCompressionBC : " + << static_cast( features.textureCompressionBC ) << "\n"; + std::cout << "\t\ttextureCompressionETC2 : " + << static_cast( features.textureCompressionETC2 ) << "\n"; + std::cout << "\t\tvariableMultisampleRate : " + << static_cast( features.variableMultisampleRate ) << "\n"; + std::cout << "\t\tvertexPipelineStoresAndAtomics : " + << static_cast( features.vertexPipelineStoresAndAtomics ) << "\n"; + std::cout << "\t\twideLines : " << static_cast( features.wideLines ) << "\n"; std::cout << "\n"; - vk::PhysicalDevice16BitStorageFeatures const& sixteenBitStorageFeatures = features2.get(); + vk::PhysicalDevice16BitStorageFeatures const & sixteenBitStorageFeatures = + features2.get(); std::cout << "\t16BitStorageFeatures:\n"; - std::cout << "\t\tstorageBuffer16BitAccess : " << static_cast(sixteenBitStorageFeatures.storageBuffer16BitAccess) << "\n"; - std::cout << "\t\tstorageInputOutput16 : " << static_cast(sixteenBitStorageFeatures.storageInputOutput16) << "\n"; - std::cout << "\t\tstoragePushConstant16 : " << static_cast(sixteenBitStorageFeatures.storagePushConstant16) << "\n"; - std::cout << "\t\tuniformAndStorageBuffer16BitAccess : " << static_cast(sixteenBitStorageFeatures.uniformAndStorageBuffer16BitAccess) << "\n"; + std::cout << "\t\tstorageBuffer16BitAccess : " + << static_cast( sixteenBitStorageFeatures.storageBuffer16BitAccess ) << "\n"; + std::cout << 
"\t\tstorageInputOutput16 : " + << static_cast( sixteenBitStorageFeatures.storageInputOutput16 ) << "\n"; + std::cout << "\t\tstoragePushConstant16 : " + << static_cast( sixteenBitStorageFeatures.storagePushConstant16 ) << "\n"; + std::cout << "\t\tuniformAndStorageBuffer16BitAccess : " + << static_cast( sixteenBitStorageFeatures.uniformAndStorageBuffer16BitAccess ) << "\n"; std::cout << "\n"; - if (vk::su::contains(extensionProperties, "VK_KHR_8bit_storage")) + if ( vk::su::contains( extensionProperties, "VK_KHR_8bit_storage" ) ) { - vk::PhysicalDevice8BitStorageFeaturesKHR const& eightBitStorageFeatures = features2.get(); + vk::PhysicalDevice8BitStorageFeaturesKHR const & eightBitStorageFeatures = + features2.get(); std::cout << "\t8BitStorageFeatures:\n"; - std::cout << "\t\tstorageBuffer8BitAccess : " << static_cast(eightBitStorageFeatures.storageBuffer8BitAccess) << "\n"; - std::cout << "\t\tstoragePushConstant8 : " << static_cast(eightBitStorageFeatures.storagePushConstant8) << "\n"; - std::cout << "\t\tuniformAndStorageBuffer8BitAccess : " << static_cast(eightBitStorageFeatures.uniformAndStorageBuffer8BitAccess) << "\n"; + std::cout << "\t\tstorageBuffer8BitAccess : " + << static_cast( eightBitStorageFeatures.storageBuffer8BitAccess ) << "\n"; + std::cout << "\t\tstoragePushConstant8 : " + << static_cast( eightBitStorageFeatures.storagePushConstant8 ) << "\n"; + std::cout << "\t\tuniformAndStorageBuffer8BitAccess : " + << static_cast( eightBitStorageFeatures.uniformAndStorageBuffer8BitAccess ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_astc_decode_mode")) + if ( vk::su::contains( extensionProperties, "VK_EXT_astc_decode_mode" ) ) { - vk::PhysicalDeviceASTCDecodeFeaturesEXT const& astcDecodeFeatures = features2.get(); + vk::PhysicalDeviceASTCDecodeFeaturesEXT const & astcDecodeFeatures = + features2.get(); std::cout << "\tASTCDecodeFeature:\n"; - std::cout << "\t\tdecodeModeSharedExponent : " << static_cast(astcDecodeFeatures.decodeModeSharedExponent) << "\n"; + std::cout << "\t\tdecodeModeSharedExponent : " + << static_cast( astcDecodeFeatures.decodeModeSharedExponent ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_blend_operation_advanced")) + if ( vk::su::contains( extensionProperties, "VK_EXT_blend_operation_advanced" ) ) { - vk::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& blendOperationAdvancedFeatures = features2.get(); + vk::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & blendOperationAdvancedFeatures = + features2.get(); std::cout << "\tBlendOperationAdvancedFeatures:\n"; - std::cout << "\t\tadvancedBlendCoherentOperations : " << static_cast(blendOperationAdvancedFeatures.advancedBlendCoherentOperations) << "\n"; + std::cout << "\t\tadvancedBlendCoherentOperations : " + << static_cast( blendOperationAdvancedFeatures.advancedBlendCoherentOperations ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_buffer_device_address")) + if ( vk::su::contains( extensionProperties, "VK_EXT_buffer_device_address" ) ) { - vk::PhysicalDeviceBufferDeviceAddressFeaturesEXT const& bufferDeviceAddressFeatures = features2.get(); + vk::PhysicalDeviceBufferDeviceAddressFeaturesEXT const & bufferDeviceAddressFeatures = + features2.get(); std::cout << "\tBufferDeviceAddressFeatures:\n"; - std::cout << "\t\tbufferDeviceAddress : " << static_cast(bufferDeviceAddressFeatures.bufferDeviceAddress) << "\n"; - std::cout << "\t\tbufferDeviceAddressCaptureReplay : " << 
static_cast(bufferDeviceAddressFeatures.bufferDeviceAddressCaptureReplay) << "\n"; - std::cout << "\t\tbufferDeviceAddressMultiDevice : " << static_cast(bufferDeviceAddressFeatures.bufferDeviceAddressMultiDevice) << "\n"; + std::cout << "\t\tbufferDeviceAddress : " + << static_cast( bufferDeviceAddressFeatures.bufferDeviceAddress ) << "\n"; + std::cout << "\t\tbufferDeviceAddressCaptureReplay : " + << static_cast( bufferDeviceAddressFeatures.bufferDeviceAddressCaptureReplay ) << "\n"; + std::cout << "\t\tbufferDeviceAddressMultiDevice : " + << static_cast( bufferDeviceAddressFeatures.bufferDeviceAddressMultiDevice ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_AMD_device_coherent_memory")) + if ( vk::su::contains( extensionProperties, "VK_AMD_device_coherent_memory" ) ) { - vk::PhysicalDeviceCoherentMemoryFeaturesAMD const& coherentMemoryFeatures = features2.get(); + vk::PhysicalDeviceCoherentMemoryFeaturesAMD const & coherentMemoryFeatures = + features2.get(); std::cout << "\tCoherentMemoryFeatures:\n"; - std::cout << "\t\tdeviceCoherentMemory : " << static_cast(coherentMemoryFeatures.deviceCoherentMemory) << "\n"; + std::cout << "\t\tdeviceCoherentMemory : " << static_cast( coherentMemoryFeatures.deviceCoherentMemory ) + << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NV_compute_shader_derivatives")) + if ( vk::su::contains( extensionProperties, "VK_NV_compute_shader_derivatives" ) ) { - vk::PhysicalDeviceComputeShaderDerivativesFeaturesNV const& computeShaderDerivativesFeatures = features2.get(); + vk::PhysicalDeviceComputeShaderDerivativesFeaturesNV const & computeShaderDerivativesFeatures = + features2.get(); std::cout << "\tComputeShaderDerivativeFeatures:\n"; - std::cout << "\t\tcomputeDerivativeGroupLinear : " << static_cast(computeShaderDerivativesFeatures.computeDerivativeGroupLinear) << "\n"; - std::cout << "\t\tcomputeDerivativeGroupQuads : " << static_cast(computeShaderDerivativesFeatures.computeDerivativeGroupQuads) << "\n"; + std::cout << "\t\tcomputeDerivativeGroupLinear : " + << static_cast( computeShaderDerivativesFeatures.computeDerivativeGroupLinear ) << "\n"; + std::cout << "\t\tcomputeDerivativeGroupQuads : " + << static_cast( computeShaderDerivativesFeatures.computeDerivativeGroupQuads ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_conditional_rendering")) + if ( vk::su::contains( extensionProperties, "VK_EXT_conditional_rendering" ) ) { - vk::PhysicalDeviceConditionalRenderingFeaturesEXT const& conditionalRenderingFeatures = features2.get(); + vk::PhysicalDeviceConditionalRenderingFeaturesEXT const & conditionalRenderingFeatures = + features2.get(); std::cout << "\tConditionalRenderingFeatures:\n"; - std::cout << "\t\tconditionalRendering : " << static_cast(conditionalRenderingFeatures.conditionalRendering) << "\n"; - std::cout << "\t\tinheritedConditionalRendering : " << static_cast(conditionalRenderingFeatures.inheritedConditionalRendering) << "\n"; + std::cout << "\t\tconditionalRendering : " + << static_cast( conditionalRenderingFeatures.conditionalRendering ) << "\n"; + std::cout << "\t\tinheritedConditionalRendering : " + << static_cast( conditionalRenderingFeatures.inheritedConditionalRendering ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NV_cooperative_matrix")) + if ( vk::su::contains( extensionProperties, "VK_NV_cooperative_matrix" ) ) { - vk::PhysicalDeviceCooperativeMatrixFeaturesNV const& cooperativeMatrixFeatures = 
features2.get(); + vk::PhysicalDeviceCooperativeMatrixFeaturesNV const & cooperativeMatrixFeatures = + features2.get(); std::cout << "\tCooperativeMatrixFeatures:\n"; - std::cout << "\t\tcooperativeMatrix : " << static_cast(cooperativeMatrixFeatures.cooperativeMatrix) << "\n"; - std::cout << "\t\tcooperativeMatrixRobustBufferAccess : " << static_cast(cooperativeMatrixFeatures.cooperativeMatrixRobustBufferAccess) << "\n"; + std::cout << "\t\tcooperativeMatrix : " + << static_cast( cooperativeMatrixFeatures.cooperativeMatrix ) << "\n"; + std::cout << "\t\tcooperativeMatrixRobustBufferAccess : " + << static_cast( cooperativeMatrixFeatures.cooperativeMatrixRobustBufferAccess ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NV_corner_sampled_image")) + if ( vk::su::contains( extensionProperties, "VK_NV_corner_sampled_image" ) ) { - vk::PhysicalDeviceCornerSampledImageFeaturesNV const& cornerSampledImageFeatures = features2.get(); + vk::PhysicalDeviceCornerSampledImageFeaturesNV const & cornerSampledImageFeatures = + features2.get(); std::cout << "\tCornerSampledImageFeatures:\n"; - std::cout << "\t\tcornerSampledImage : " << static_cast(cornerSampledImageFeatures.cornerSampledImage) << "\n"; + std::cout << "\t\tcornerSampledImage : " << static_cast( cornerSampledImageFeatures.cornerSampledImage ) + << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NV_coverage_reduction_mode")) + if ( vk::su::contains( extensionProperties, "VK_NV_coverage_reduction_mode" ) ) { - vk::PhysicalDeviceCoverageReductionModeFeaturesNV const& coverageReductionModeFeatures = features2.get(); + vk::PhysicalDeviceCoverageReductionModeFeaturesNV const & coverageReductionModeFeatures = + features2.get(); std::cout << "\tCoverageReductionModeFeatures:\n"; - std::cout << "\t\tcoverageReductionMode : " << static_cast(coverageReductionModeFeatures.coverageReductionMode) << "\n"; + std::cout << "\t\tcoverageReductionMode : " + << static_cast( coverageReductionModeFeatures.coverageReductionMode ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NV_dedicated_allocation_image_aliasing")) + if ( vk::su::contains( extensionProperties, "VK_NV_dedicated_allocation_image_aliasing" ) ) { - vk::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& dedicatedAllocationImageAliasingFeatures = features2.get(); + vk::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & dedicatedAllocationImageAliasingFeatures = + features2.get(); std::cout << "\tDedicatedAllocationAliasingFeatures:\n"; - std::cout << "\t\tdedicatedAllocationImageAliasing : " << static_cast(dedicatedAllocationImageAliasingFeatures.dedicatedAllocationImageAliasing) << "\n"; + std::cout << "\t\tdedicatedAllocationImageAliasing : " + << static_cast( dedicatedAllocationImageAliasingFeatures.dedicatedAllocationImageAliasing ) + << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_depth_clip_enable")) + if ( vk::su::contains( extensionProperties, "VK_EXT_depth_clip_enable" ) ) { - vk::PhysicalDeviceDepthClipEnableFeaturesEXT const& depthClipEnabledFeatures = features2.get(); + vk::PhysicalDeviceDepthClipEnableFeaturesEXT const & depthClipEnabledFeatures = + features2.get(); std::cout << "\tDepthClipEnabledFeatures:\n"; - std::cout << "\t\tdepthClipEnable : " << static_cast(depthClipEnabledFeatures.depthClipEnable) << "\n"; + std::cout << "\t\tdepthClipEnable : " << static_cast( depthClipEnabledFeatures.depthClipEnable ) << "\n"; std::cout << "\n"; 
} - if (vk::su::contains(extensionProperties, "VK_EXT_descriptor_indexing")) + if ( vk::su::contains( extensionProperties, "VK_EXT_descriptor_indexing" ) ) { - vk::PhysicalDeviceDescriptorIndexingFeaturesEXT const& descriptorIndexingFeatures = features2.get(); + vk::PhysicalDeviceDescriptorIndexingFeaturesEXT const & descriptorIndexingFeatures = + features2.get(); std::cout << "\tDescriptorIndexingFeatures:\n"; - std::cout << "\t\tdescriptorBindingPartiallyBound : " << static_cast(descriptorIndexingFeatures.descriptorBindingPartiallyBound) << "\n"; - std::cout << "\t\tdescriptorBindingSampledImageUpdateAfterBind : " << static_cast(descriptorIndexingFeatures.descriptorBindingSampledImageUpdateAfterBind) << "\n"; - std::cout << "\t\tdescriptorBindingStorageBufferUpdateAfterBind : " << static_cast(descriptorIndexingFeatures.descriptorBindingStorageBufferUpdateAfterBind) << "\n"; - std::cout << "\t\tdescriptorBindingStorageImageUpdateAfterBind : " << static_cast(descriptorIndexingFeatures.descriptorBindingStorageImageUpdateAfterBind) << "\n"; - std::cout << "\t\tdescriptorBindingStorageTexelBufferUpdateAfterBind : " << static_cast(descriptorIndexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind) << "\n"; - std::cout << "\t\tdescriptorBindingUniformBufferUpdateAfterBind : " << static_cast(descriptorIndexingFeatures.descriptorBindingUniformBufferUpdateAfterBind) << "\n"; - std::cout << "\t\tdescriptorBindingUniformTexelBufferUpdateAfterBind : " << static_cast(descriptorIndexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind) << "\n"; - std::cout << "\t\tdescriptorBindingUpdateUnusedWhilePending : " << static_cast(descriptorIndexingFeatures.descriptorBindingUpdateUnusedWhilePending) << "\n"; - std::cout << "\t\tdescriptorBindingVariableDescriptorCount : " << static_cast(descriptorIndexingFeatures.descriptorBindingVariableDescriptorCount) << "\n"; - std::cout << "\t\truntimeDescriptorArray : " << static_cast(descriptorIndexingFeatures.runtimeDescriptorArray) << "\n"; - std::cout << "\t\tshaderInputAttachmentArrayDynamicIndexing : " << static_cast(descriptorIndexingFeatures.shaderInputAttachmentArrayDynamicIndexing) << "\n"; - std::cout << "\t\tshaderInputAttachmentArrayNonUniformIndexing : " << static_cast(descriptorIndexingFeatures.shaderInputAttachmentArrayNonUniformIndexing) << "\n"; - std::cout << "\t\tshaderSampledImageArrayNonUniformIndexing : " << static_cast(descriptorIndexingFeatures.shaderSampledImageArrayNonUniformIndexing) << "\n"; - std::cout << "\t\tshaderStorageBufferArrayNonUniformIndexing : " << static_cast(descriptorIndexingFeatures.shaderStorageBufferArrayNonUniformIndexing) << "\n"; - std::cout << "\t\tshaderStorageImageArrayNonUniformIndexing : " << static_cast(descriptorIndexingFeatures.shaderStorageImageArrayNonUniformIndexing) << "\n"; - std::cout << "\t\tshaderStorageTexelBufferArrayDynamicIndexing : " << static_cast(descriptorIndexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing) << "\n"; - std::cout << "\t\tshaderStorageTexelBufferArrayNonUniformIndexing : " << static_cast(descriptorIndexingFeatures.shaderStorageTexelBufferArrayNonUniformIndexing) << "\n"; - std::cout << "\t\tshaderUniformBufferArrayNonUniformIndexing : " << static_cast(descriptorIndexingFeatures.shaderUniformBufferArrayNonUniformIndexing) << "\n"; - std::cout << "\t\tshaderUniformTexelBufferArrayDynamicIndexing : " << static_cast(descriptorIndexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing) << "\n"; - std::cout << 
"\t\tshaderUniformTexelBufferArrayNonUniformIndexing : " << static_cast(descriptorIndexingFeatures.shaderUniformTexelBufferArrayNonUniformIndexing) << "\n"; + std::cout << "\t\tdescriptorBindingPartiallyBound : " + << static_cast( descriptorIndexingFeatures.descriptorBindingPartiallyBound ) << "\n"; + std::cout << "\t\tdescriptorBindingSampledImageUpdateAfterBind : " + << static_cast( descriptorIndexingFeatures.descriptorBindingSampledImageUpdateAfterBind ) + << "\n"; + std::cout << "\t\tdescriptorBindingStorageBufferUpdateAfterBind : " + << static_cast( descriptorIndexingFeatures.descriptorBindingStorageBufferUpdateAfterBind ) + << "\n"; + std::cout << "\t\tdescriptorBindingStorageImageUpdateAfterBind : " + << static_cast( descriptorIndexingFeatures.descriptorBindingStorageImageUpdateAfterBind ) + << "\n"; + std::cout << "\t\tdescriptorBindingStorageTexelBufferUpdateAfterBind : " + << static_cast( descriptorIndexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind ) + << "\n"; + std::cout << "\t\tdescriptorBindingUniformBufferUpdateAfterBind : " + << static_cast( descriptorIndexingFeatures.descriptorBindingUniformBufferUpdateAfterBind ) + << "\n"; + std::cout << "\t\tdescriptorBindingUniformTexelBufferUpdateAfterBind : " + << static_cast( descriptorIndexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind ) + << "\n"; + std::cout << "\t\tdescriptorBindingUpdateUnusedWhilePending : " + << static_cast( descriptorIndexingFeatures.descriptorBindingUpdateUnusedWhilePending ) << "\n"; + std::cout << "\t\tdescriptorBindingVariableDescriptorCount : " + << static_cast( descriptorIndexingFeatures.descriptorBindingVariableDescriptorCount ) << "\n"; + std::cout << "\t\truntimeDescriptorArray : " + << static_cast( descriptorIndexingFeatures.runtimeDescriptorArray ) << "\n"; + std::cout << "\t\tshaderInputAttachmentArrayDynamicIndexing : " + << static_cast( descriptorIndexingFeatures.shaderInputAttachmentArrayDynamicIndexing ) << "\n"; + std::cout << "\t\tshaderInputAttachmentArrayNonUniformIndexing : " + << static_cast( descriptorIndexingFeatures.shaderInputAttachmentArrayNonUniformIndexing ) + << "\n"; + std::cout << "\t\tshaderSampledImageArrayNonUniformIndexing : " + << static_cast( descriptorIndexingFeatures.shaderSampledImageArrayNonUniformIndexing ) << "\n"; + std::cout << "\t\tshaderStorageBufferArrayNonUniformIndexing : " + << static_cast( descriptorIndexingFeatures.shaderStorageBufferArrayNonUniformIndexing ) << "\n"; + std::cout << "\t\tshaderStorageImageArrayNonUniformIndexing : " + << static_cast( descriptorIndexingFeatures.shaderStorageImageArrayNonUniformIndexing ) << "\n"; + std::cout << "\t\tshaderStorageTexelBufferArrayDynamicIndexing : " + << static_cast( descriptorIndexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing ) + << "\n"; + std::cout << "\t\tshaderStorageTexelBufferArrayNonUniformIndexing : " + << static_cast( descriptorIndexingFeatures.shaderStorageTexelBufferArrayNonUniformIndexing ) + << "\n"; + std::cout << "\t\tshaderUniformBufferArrayNonUniformIndexing : " + << static_cast( descriptorIndexingFeatures.shaderUniformBufferArrayNonUniformIndexing ) << "\n"; + std::cout << "\t\tshaderUniformTexelBufferArrayDynamicIndexing : " + << static_cast( descriptorIndexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing ) + << "\n"; + std::cout << "\t\tshaderUniformTexelBufferArrayNonUniformIndexing : " + << static_cast( descriptorIndexingFeatures.shaderUniformTexelBufferArrayNonUniformIndexing ) + << "\n"; std::cout << "\n"; } - if 
(vk::su::contains(extensionProperties, "VK_NV_scissor_exclusive"))
+    if ( vk::su::contains( extensionProperties, "VK_NV_scissor_exclusive" ) )
     {
-      vk::PhysicalDeviceExclusiveScissorFeaturesNV const& exclusiveScissorFeatures = features2.get<vk::PhysicalDeviceExclusiveScissorFeaturesNV>();
+      vk::PhysicalDeviceExclusiveScissorFeaturesNV const & exclusiveScissorFeatures =
+        features2.get<vk::PhysicalDeviceExclusiveScissorFeaturesNV>();
       std::cout << "\tExclusiveScissorFeatures:\n";
-      std::cout << "\t\texclusiveScissor : " << static_cast<bool>(exclusiveScissorFeatures.exclusiveScissor) << "\n";
+      std::cout << "\t\texclusiveScissor : " << static_cast<bool>( exclusiveScissorFeatures.exclusiveScissor )
+                << "\n";
       std::cout << "\n";
     }
-    if (vk::su::contains(extensionProperties, "VK_EXT_fragment_density_map"))
+    if ( vk::su::contains( extensionProperties, "VK_EXT_fragment_density_map" ) )
     {
-      vk::PhysicalDeviceFragmentDensityMapFeaturesEXT const& fragmentDensityMapFeatures = features2.get<vk::PhysicalDeviceFragmentDensityMapFeaturesEXT>();
+      vk::PhysicalDeviceFragmentDensityMapFeaturesEXT const & fragmentDensityMapFeatures =
+        features2.get<vk::PhysicalDeviceFragmentDensityMapFeaturesEXT>();
       std::cout << "\tFragmentDensityMapFeatures:\n";
-      std::cout << "\t\tfragmentDensityMap : " << static_cast<bool>(fragmentDensityMapFeatures.fragmentDensityMap) << "\n";
-      std::cout << "\t\tfragmentDensityMapDynamic : " << static_cast<bool>(fragmentDensityMapFeatures.fragmentDensityMapDynamic) << "\n";
-      std::cout << "\t\tfragmentDensityMapNonSubsampledImages : " << static_cast<bool>(fragmentDensityMapFeatures.fragmentDensityMapNonSubsampledImages) << "\n";
+      std::cout << "\t\tfragmentDensityMap : "
+                << static_cast<bool>( fragmentDensityMapFeatures.fragmentDensityMap ) << "\n";
+      std::cout << "\t\tfragmentDensityMapDynamic : "
+                << static_cast<bool>( fragmentDensityMapFeatures.fragmentDensityMapDynamic ) << "\n";
+      std::cout << "\t\tfragmentDensityMapNonSubsampledImages : "
+                << static_cast<bool>( fragmentDensityMapFeatures.fragmentDensityMapNonSubsampledImages ) << "\n";
       std::cout << "\n";
     }
-    if (vk::su::contains(extensionProperties, "VK_NV_fragment_shader_barycentric"))
+    if ( vk::su::contains( extensionProperties, "VK_NV_fragment_shader_barycentric" ) )
     {
-      vk::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& fragmentShaderBarycentricFeatures = features2.get<vk::PhysicalDeviceFragmentShaderBarycentricFeaturesNV>();
+      vk::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & fragmentShaderBarycentricFeatures =
+        features2.get<vk::PhysicalDeviceFragmentShaderBarycentricFeaturesNV>();
       std::cout << "\tFragmentShaderBarycentricFeatures:\n";
-      std::cout << "\t\tfragmentShaderBarycentric : " << static_cast<bool>(fragmentShaderBarycentricFeatures.fragmentShaderBarycentric) << "\n";
+      std::cout << "\t\tfragmentShaderBarycentric : "
+                << static_cast<bool>( fragmentShaderBarycentricFeatures.fragmentShaderBarycentric ) << "\n";
       std::cout << "\n";
     }
-    if (vk::su::contains(extensionProperties, "VK_EXT_fragment_shader_interlock"))
+    if ( vk::su::contains( extensionProperties, "VK_EXT_fragment_shader_interlock" ) )
     {
-      vk::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& fragmentShaderInterlockFeatures = features2.get<vk::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>();
+      vk::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & fragmentShaderInterlockFeatures =
+        features2.get<vk::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>();
       std::cout << "\tFragmentShaderInterlockFeatures:\n";
-      std::cout << "\t\tfragmentShaderPixelInterlock : " << static_cast<bool>(fragmentShaderInterlockFeatures.fragmentShaderPixelInterlock) << "\n";
-      std::cout << "\t\tfragmentShaderSampleInterlock : " << static_cast<bool>(fragmentShaderInterlockFeatures.fragmentShaderSampleInterlock) << "\n";
-      std::cout << "\t\tfragmentShaderShadingRateInterlock : " << static_cast<bool>(fragmentShaderInterlockFeatures.fragmentShaderShadingRateInterlock) << "\n";
+      std::cout << "\t\tfragmentShaderPixelInterlock : 
" + << static_cast( fragmentShaderInterlockFeatures.fragmentShaderPixelInterlock ) << "\n"; + std::cout << "\t\tfragmentShaderSampleInterlock : " + << static_cast( fragmentShaderInterlockFeatures.fragmentShaderSampleInterlock ) << "\n"; + std::cout << "\t\tfragmentShaderShadingRateInterlock : " + << static_cast( fragmentShaderInterlockFeatures.fragmentShaderShadingRateInterlock ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_host_query_reset")) + if ( vk::su::contains( extensionProperties, "VK_EXT_host_query_reset" ) ) { - vk::PhysicalDeviceHostQueryResetFeaturesEXT const& hostQueryResetFeatures = features2.get(); + vk::PhysicalDeviceHostQueryResetFeaturesEXT const & hostQueryResetFeatures = + features2.get(); std::cout << "\tHostQueryResetFeatures:\n"; - std::cout << "\t\thostQueryReset : " << static_cast(hostQueryResetFeatures.hostQueryReset) << "\n"; + std::cout << "\t\thostQueryReset : " << static_cast( hostQueryResetFeatures.hostQueryReset ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_imageless_framebuffer")) + if ( vk::su::contains( extensionProperties, "VK_KHR_imageless_framebuffer" ) ) { - vk::PhysicalDeviceImagelessFramebufferFeaturesKHR const& imagelessFramebufferFeatures = features2.get(); + vk::PhysicalDeviceImagelessFramebufferFeaturesKHR const & imagelessFramebufferFeatures = + features2.get(); std::cout << "\tImagelessFramebufferFeatures:\n"; - std::cout << "\t\timagelessFramebuffer : " << static_cast(imagelessFramebufferFeatures.imagelessFramebuffer) << "\n"; + std::cout << "\t\timagelessFramebuffer : " + << static_cast( imagelessFramebufferFeatures.imagelessFramebuffer ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_index_type_uint8")) + if ( vk::su::contains( extensionProperties, "VK_EXT_index_type_uint8" ) ) { - vk::PhysicalDeviceIndexTypeUint8FeaturesEXT const& indexTypeUint8Features = features2.get(); + vk::PhysicalDeviceIndexTypeUint8FeaturesEXT const & indexTypeUint8Features = + features2.get(); std::cout << "\tIndexTypeUint8Features:\n"; - std::cout << "\t\tindexTypeUint8 : " << static_cast(indexTypeUint8Features.indexTypeUint8) << "\n"; + std::cout << "\t\tindexTypeUint8 : " << static_cast( indexTypeUint8Features.indexTypeUint8 ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_inline_uniform_block")) + if ( vk::su::contains( extensionProperties, "VK_EXT_inline_uniform_block" ) ) { - vk::PhysicalDeviceInlineUniformBlockFeaturesEXT const& inlineUniformBlockFeatures = features2.get(); + vk::PhysicalDeviceInlineUniformBlockFeaturesEXT const & inlineUniformBlockFeatures = + features2.get(); std::cout << "\tInlineUniformBlockFeatures:\n"; - std::cout << "\t\tdescriptorBindingInlineUniformBlockUpdateAfterBind : " << static_cast(inlineUniformBlockFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind) << "\n"; - std::cout << "\t\tinlineUniformBlock : " << static_cast(inlineUniformBlockFeatures.inlineUniformBlock) << "\n"; + std::cout << "\t\tdescriptorBindingInlineUniformBlockUpdateAfterBind : " + << static_cast( inlineUniformBlockFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind ) + << "\n"; + std::cout << "\t\tinlineUniformBlock : " + << static_cast( inlineUniformBlockFeatures.inlineUniformBlock ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_line_rasterization")) + if ( vk::su::contains( extensionProperties, "VK_EXT_line_rasterization" ) ) { - 
vk::PhysicalDeviceLineRasterizationFeaturesEXT const& lineRasterizationFeatures = features2.get(); + vk::PhysicalDeviceLineRasterizationFeaturesEXT const & lineRasterizationFeatures = + features2.get(); std::cout << "\tLineRasterizationFeatures:\n"; - std::cout << "\t\tbresenhamLines : " << static_cast(lineRasterizationFeatures.bresenhamLines) << "\n"; - std::cout << "\t\trectangularLines : " << static_cast(lineRasterizationFeatures.rectangularLines) << "\n"; - std::cout << "\t\tsmoothLines : " << static_cast(lineRasterizationFeatures.smoothLines) << "\n"; - std::cout << "\t\tstippledBresenhamLines : " << static_cast(lineRasterizationFeatures.stippledBresenhamLines) << "\n"; - std::cout << "\t\tstippledRectangularLines : " << static_cast(lineRasterizationFeatures.stippledRectangularLines) << "\n"; - std::cout << "\t\tstippledSmoothLines : " << static_cast(lineRasterizationFeatures.stippledSmoothLines) << "\n"; + std::cout << "\t\tbresenhamLines : " << static_cast( lineRasterizationFeatures.bresenhamLines ) + << "\n"; + std::cout << "\t\trectangularLines : " + << static_cast( lineRasterizationFeatures.rectangularLines ) << "\n"; + std::cout << "\t\tsmoothLines : " << static_cast( lineRasterizationFeatures.smoothLines ) + << "\n"; + std::cout << "\t\tstippledBresenhamLines : " + << static_cast( lineRasterizationFeatures.stippledBresenhamLines ) << "\n"; + std::cout << "\t\tstippledRectangularLines : " + << static_cast( lineRasterizationFeatures.stippledRectangularLines ) << "\n"; + std::cout << "\t\tstippledSmoothLines : " + << static_cast( lineRasterizationFeatures.stippledSmoothLines ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_memory_priority")) + if ( vk::su::contains( extensionProperties, "VK_EXT_memory_priority" ) ) { - vk::PhysicalDeviceMemoryPriorityFeaturesEXT const& memoryPriorityFeatures = features2.get(); + vk::PhysicalDeviceMemoryPriorityFeaturesEXT const & memoryPriorityFeatures = + features2.get(); std::cout << "\tMemoryPriorityFeatures:\n"; - std::cout << "\t\tmemoryPriority : " << static_cast(memoryPriorityFeatures.memoryPriority) << "\n"; + std::cout << "\t\tmemoryPriority : " << static_cast( memoryPriorityFeatures.memoryPriority ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NV_mesh_shader")) + if ( vk::su::contains( extensionProperties, "VK_NV_mesh_shader" ) ) { - vk::PhysicalDeviceMeshShaderFeaturesNV const& meshShaderFeatures = features2.get(); + vk::PhysicalDeviceMeshShaderFeaturesNV const & meshShaderFeatures = + features2.get(); std::cout << "\tMeshShaderFeatures:\n"; - std::cout << "\t\tmeshShader : " << static_cast(meshShaderFeatures.meshShader) << "\n"; - std::cout << "\t\ttaskShader : " << static_cast(meshShaderFeatures.taskShader) << "\n"; + std::cout << "\t\tmeshShader : " << static_cast( meshShaderFeatures.meshShader ) << "\n"; + std::cout << "\t\ttaskShader : " << static_cast( meshShaderFeatures.taskShader ) << "\n"; std::cout << "\n"; } - vk::PhysicalDeviceMultiviewFeatures const& multiviewFeatures = features2.get(); + vk::PhysicalDeviceMultiviewFeatures const & multiviewFeatures = + features2.get(); std::cout << "\tMultiviewFeatures:\n"; - std::cout << "\t\tmultiview : " << static_cast(multiviewFeatures.multiview) << "\n"; - std::cout << "\t\tmultiviewGeometryShader : " << static_cast(multiviewFeatures.multiviewGeometryShader) << "\n"; - std::cout << "\t\tmultiviewTessellationShader : " << static_cast(multiviewFeatures.multiviewTessellationShader) << "\n"; + std::cout << "\t\tmultiview 
: " << static_cast( multiviewFeatures.multiview ) << "\n"; + std::cout << "\t\tmultiviewGeometryShader : " + << static_cast( multiviewFeatures.multiviewGeometryShader ) << "\n"; + std::cout << "\t\tmultiviewTessellationShader : " + << static_cast( multiviewFeatures.multiviewTessellationShader ) << "\n"; std::cout << "\n"; - if (vk::su::contains(extensionProperties, "VK_KHR_pipeline_executable_properties")) + if ( vk::su::contains( extensionProperties, "VK_KHR_pipeline_executable_properties" ) ) { - vk::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& pipelineExecutablePropertiesFeatures = features2.get(); + vk::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & pipelineExecutablePropertiesFeatures = + features2.get(); std::cout << "\tPipelineExectuablePropertiesFeatures:\n"; - std::cout << "\t\tpipelineExecutableInfo : " << static_cast(pipelineExecutablePropertiesFeatures.pipelineExecutableInfo) << "\n"; + std::cout << "\t\tpipelineExecutableInfo : " + << static_cast( pipelineExecutablePropertiesFeatures.pipelineExecutableInfo ) << "\n"; std::cout << "\n"; } - vk::PhysicalDeviceProtectedMemoryFeatures const& protectedMemoryFeatures = features2.get(); + vk::PhysicalDeviceProtectedMemoryFeatures const & protectedMemoryFeatures = + features2.get(); std::cout << "\tProtectedMemoryFeatures:\n"; - std::cout << "\t\tprotectedMemory : " << static_cast(protectedMemoryFeatures.protectedMemory) << "\n"; + std::cout << "\t\tprotectedMemory : " << static_cast( protectedMemoryFeatures.protectedMemory ) << "\n"; std::cout << "\n"; - if (vk::su::contains(extensionProperties, "VK_NV_representative_fragment_test")) + if ( vk::su::contains( extensionProperties, "VK_NV_representative_fragment_test" ) ) { - vk::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& representativeFragmentTestFeatures = features2.get(); + vk::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & representativeFragmentTestFeatures = + features2.get(); std::cout << "\tRepresentativeFragmentTestFeatures:\n"; - std::cout << "\t\trepresentativeFragmentTest : " << static_cast(representativeFragmentTestFeatures.representativeFragmentTest) << "\n"; + std::cout << "\t\trepresentativeFragmentTest : " + << static_cast( representativeFragmentTestFeatures.representativeFragmentTest ) << "\n"; std::cout << "\n"; } - vk::PhysicalDeviceSamplerYcbcrConversionFeatures const& samplerYcbcrConversionFeatures = features2.get(); + vk::PhysicalDeviceSamplerYcbcrConversionFeatures const & samplerYcbcrConversionFeatures = + features2.get(); std::cout << "\tSamplerYcbcrConversionFeatures:\n"; - std::cout << "\t\tsamplerYcbcrConversion : " << static_cast(samplerYcbcrConversionFeatures.samplerYcbcrConversion) << "\n"; + std::cout << "\t\tsamplerYcbcrConversion : " + << static_cast( samplerYcbcrConversionFeatures.samplerYcbcrConversion ) << "\n"; std::cout << "\n"; - if (vk::su::contains(extensionProperties, "VK_EXT_scalar_block_layout")) + if ( vk::su::contains( extensionProperties, "VK_EXT_scalar_block_layout" ) ) { - vk::PhysicalDeviceScalarBlockLayoutFeaturesEXT const& scalarBlockLayoutFeatures = features2.get(); + vk::PhysicalDeviceScalarBlockLayoutFeaturesEXT const & scalarBlockLayoutFeatures = + features2.get(); std::cout << "\tScalarBlockLayoutFeatures:\n"; - std::cout << "\t\tscalarBlockLayout : " << static_cast(scalarBlockLayoutFeatures.scalarBlockLayout) << "\n"; + std::cout << "\t\tscalarBlockLayout : " << static_cast( scalarBlockLayoutFeatures.scalarBlockLayout ) + << "\n"; std::cout << "\n"; } - if 
(vk::su::contains(extensionProperties, "VK_KHR_shader_atomic_int64")) + if ( vk::su::contains( extensionProperties, "VK_KHR_shader_atomic_int64" ) ) { - vk::PhysicalDeviceShaderAtomicInt64FeaturesKHR const& shaderAtomicInt64Features = features2.get(); + vk::PhysicalDeviceShaderAtomicInt64FeaturesKHR const & shaderAtomicInt64Features = + features2.get(); std::cout << "\tShaderAtomicInt64Features:\n"; - std::cout << "\t\tshaderBufferInt64Atomics : " << static_cast(shaderAtomicInt64Features.shaderBufferInt64Atomics) << "\n"; - std::cout << "\t\tshaderSharedInt64Atomics : " << static_cast(shaderAtomicInt64Features.shaderSharedInt64Atomics) << "\n"; + std::cout << "\t\tshaderBufferInt64Atomics : " + << static_cast( shaderAtomicInt64Features.shaderBufferInt64Atomics ) << "\n"; + std::cout << "\t\tshaderSharedInt64Atomics : " + << static_cast( shaderAtomicInt64Features.shaderSharedInt64Atomics ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_shader_demote_to_helper_invocation")) + if ( vk::su::contains( extensionProperties, "VK_EXT_shader_demote_to_helper_invocation" ) ) { - vk::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& shaderDemoteToHelperInvocationFeatures = features2.get(); + vk::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & shaderDemoteToHelperInvocationFeatures = + features2.get(); std::cout << "\tShaderDemoteToHelperInvocationFeatures:\n"; - std::cout << "\t\tshaderDemoteToHelperInvocation : " << static_cast(shaderDemoteToHelperInvocationFeatures.shaderDemoteToHelperInvocation) << "\n"; + std::cout << "\t\tshaderDemoteToHelperInvocation : " + << static_cast( shaderDemoteToHelperInvocationFeatures.shaderDemoteToHelperInvocation ) << "\n"; std::cout << "\n"; } - vk::PhysicalDeviceShaderDrawParametersFeatures const& shaderDrawParametersFeature = features2.get(); + vk::PhysicalDeviceShaderDrawParametersFeatures const & shaderDrawParametersFeature = + features2.get(); std::cout << "\tShaderDrawParametersFeature:\n"; - std::cout << "\t\tshaderDrawParameters : " << static_cast(shaderDrawParametersFeature.shaderDrawParameters) << "\n"; + std::cout << "\t\tshaderDrawParameters : " + << static_cast( shaderDrawParametersFeature.shaderDrawParameters ) << "\n"; std::cout << "\n"; - if (vk::su::contains(extensionProperties, "VK_KHR_shader_float16_int8")) + if ( vk::su::contains( extensionProperties, "VK_KHR_shader_float16_int8" ) ) { - vk::PhysicalDeviceShaderFloat16Int8FeaturesKHR const& shaderFloat16Int8Features = features2.get(); + vk::PhysicalDeviceShaderFloat16Int8FeaturesKHR const & shaderFloat16Int8Features = + features2.get(); std::cout << "\tShaderFloat16Int8Features:\n"; - std::cout << "\t\tshaderFloat16 : " << static_cast(shaderFloat16Int8Features.shaderFloat16) << "\n"; - std::cout << "\t\tshaderInt8 : " << static_cast(shaderFloat16Int8Features.shaderInt8) << "\n"; + std::cout << "\t\tshaderFloat16 : " << static_cast( shaderFloat16Int8Features.shaderFloat16 ) << "\n"; + std::cout << "\t\tshaderInt8 : " << static_cast( shaderFloat16Int8Features.shaderInt8 ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NV_shader_image_footprint")) + if ( vk::su::contains( extensionProperties, "VK_NV_shader_image_footprint" ) ) { - vk::PhysicalDeviceShaderImageFootprintFeaturesNV const& shaderImageFootprintFeatures = features2.get(); + vk::PhysicalDeviceShaderImageFootprintFeaturesNV const & shaderImageFootprintFeatures = + features2.get(); std::cout << "\tShaderImageFootprintFeatures:\n"; - std::cout << 
"\t\timageFootprint : " << static_cast(shaderImageFootprintFeatures.imageFootprint) << "\n"; + std::cout << "\t\timageFootprint : " << static_cast( shaderImageFootprintFeatures.imageFootprint ) + << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_INTEL_shader_integer_functions2")) + if ( vk::su::contains( extensionProperties, "VK_INTEL_shader_integer_functions2" ) ) { - vk::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& shaderIntegerFunctions2Features = features2.get(); + vk::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & shaderIntegerFunctions2Features = + features2.get(); std::cout << "\tShaderIntegerFunctions2Features:\n"; - std::cout << "\t\tshaderIntegerFunctions2 : " << static_cast(shaderIntegerFunctions2Features.shaderIntegerFunctions2) << "\n"; + std::cout << "\t\tshaderIntegerFunctions2 : " + << static_cast( shaderIntegerFunctions2Features.shaderIntegerFunctions2 ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NV_shader_sm_builtins")) + if ( vk::su::contains( extensionProperties, "VK_NV_shader_sm_builtins" ) ) { - vk::PhysicalDeviceShaderSMBuiltinsFeaturesNV const& shaderSMBuiltinsFeatures = features2.get(); + vk::PhysicalDeviceShaderSMBuiltinsFeaturesNV const & shaderSMBuiltinsFeatures = + features2.get(); std::cout << "\tShaderSMBuiltinsFeatures:\n"; - std::cout << "\t\tshaderSMBuiltins : " << static_cast(shaderSMBuiltinsFeatures.shaderSMBuiltins) << "\n"; + std::cout << "\t\tshaderSMBuiltins : " << static_cast( shaderSMBuiltinsFeatures.shaderSMBuiltins ) + << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_shader_subgroup_extended_types")) + if ( vk::su::contains( extensionProperties, "VK_KHR_shader_subgroup_extended_types" ) ) { - vk::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const& shaderSubgroupExtendedTypesFeatures = features2.get(); + vk::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const & shaderSubgroupExtendedTypesFeatures = + features2.get(); std::cout << "\tShaderSubgroupExtendedTypeFeatures:\n"; - std::cout << "\t\tshaderSubgroupExtendedTypes : " << static_cast(shaderSubgroupExtendedTypesFeatures.shaderSubgroupExtendedTypes) << "\n"; + std::cout << "\t\tshaderSubgroupExtendedTypes : " + << static_cast( shaderSubgroupExtendedTypesFeatures.shaderSubgroupExtendedTypes ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NV_shading_rate_image")) + if ( vk::su::contains( extensionProperties, "VK_NV_shading_rate_image" ) ) { - vk::PhysicalDeviceShadingRateImageFeaturesNV const& shadingRateImageFeatures = features2.get(); + vk::PhysicalDeviceShadingRateImageFeaturesNV const & shadingRateImageFeatures = + features2.get(); std::cout << "\tShadingRateImageFeatures:\n"; - std::cout << "\t\tshadingRateCoarseSampleOrder : " << static_cast(shadingRateImageFeatures.shadingRateCoarseSampleOrder) << "\n"; - std::cout << "\t\tshadingRateImage : " << static_cast(shadingRateImageFeatures.shadingRateImage) << "\n"; + std::cout << "\t\tshadingRateCoarseSampleOrder : " + << static_cast( shadingRateImageFeatures.shadingRateCoarseSampleOrder ) << "\n"; + std::cout << "\t\tshadingRateImage : " + << static_cast( shadingRateImageFeatures.shadingRateImage ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_subgroup_size_control")) + if ( vk::su::contains( extensionProperties, "VK_EXT_subgroup_size_control" ) ) { - vk::PhysicalDeviceSubgroupSizeControlFeaturesEXT const& subgroupSizeControlFeatures = 
features2.get(); + vk::PhysicalDeviceSubgroupSizeControlFeaturesEXT const & subgroupSizeControlFeatures = + features2.get(); std::cout << "\tSubgroupSizeControlFeatures:\n"; - std::cout << "\t\tcomputeFullSubgroups : " << static_cast(subgroupSizeControlFeatures.computeFullSubgroups) << "\n"; - std::cout << "\t\tsubgroupSizeControl : " << static_cast(subgroupSizeControlFeatures.subgroupSizeControl) << "\n"; + std::cout << "\t\tcomputeFullSubgroups : " + << static_cast( subgroupSizeControlFeatures.computeFullSubgroups ) << "\n"; + std::cout << "\t\tsubgroupSizeControl : " + << static_cast( subgroupSizeControlFeatures.subgroupSizeControl ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_texel_buffer_alignment")) + if ( vk::su::contains( extensionProperties, "VK_EXT_texel_buffer_alignment" ) ) { - vk::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& texelBufferAlignmentFeatures = features2.get(); + vk::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & texelBufferAlignmentFeatures = + features2.get(); std::cout << "\tTexelBufferAlignmentFeatures:\n"; - std::cout << "\t\ttexelBufferAlignment : " << static_cast(texelBufferAlignmentFeatures.texelBufferAlignment) << "\n"; + std::cout << "\t\ttexelBufferAlignment : " + << static_cast( texelBufferAlignmentFeatures.texelBufferAlignment ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_texture_compression_astc_hdr")) + if ( vk::su::contains( extensionProperties, "VK_EXT_texture_compression_astc_hdr" ) ) { - vk::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& textureCompressionASTCHDRFeatures = features2.get(); + vk::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & textureCompressionASTCHDRFeatures = + features2.get(); std::cout << "\tTextureCompressionASTCHHRFeatures:\n"; - std::cout << "\t\ttextureCompressionASTC_HDR : " << static_cast(textureCompressionASTCHDRFeatures.textureCompressionASTC_HDR) << "\n"; + std::cout << "\t\ttextureCompressionASTC_HDR : " + << static_cast( textureCompressionASTCHDRFeatures.textureCompressionASTC_HDR ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_timeline_semaphore")) + if ( vk::su::contains( extensionProperties, "VK_KHR_timeline_semaphore" ) ) { - vk::PhysicalDeviceTimelineSemaphoreFeaturesKHR const& timelineSemaphoreFeatures = features2.get(); + vk::PhysicalDeviceTimelineSemaphoreFeaturesKHR const & timelineSemaphoreFeatures = + features2.get(); std::cout << "\tTimelineSemaphoreFeatures:\n"; - std::cout << "\t\ttimelineSemaphore :" << static_cast(timelineSemaphoreFeatures.timelineSemaphore) << "\n"; + std::cout << "\t\ttimelineSemaphore :" << static_cast( timelineSemaphoreFeatures.timelineSemaphore ) + << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_transform_feedback")) + if ( vk::su::contains( extensionProperties, "VK_EXT_transform_feedback" ) ) { - vk::PhysicalDeviceTransformFeedbackFeaturesEXT const& transformFeedbackFeatures = features2.get(); + vk::PhysicalDeviceTransformFeedbackFeaturesEXT const & transformFeedbackFeatures = + features2.get(); std::cout << "\tTransformFeedbackFeatures:\n"; - std::cout << "\t\tgeometryStreams : " << static_cast(transformFeedbackFeatures.geometryStreams) << "\n"; - std::cout << "\t\ttransformFeedback : " << static_cast(transformFeedbackFeatures.transformFeedback) << "\n"; + std::cout << "\t\tgeometryStreams : " << static_cast( transformFeedbackFeatures.geometryStreams ) << "\n"; + std::cout << 
"\t\ttransformFeedback : " << static_cast( transformFeedbackFeatures.transformFeedback ) + << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_uniform_buffer_standard_layout")) + if ( vk::su::contains( extensionProperties, "VK_KHR_uniform_buffer_standard_layout" ) ) { - vk::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& uniformBufferStandardLayoutFeatures = features2.get(); + vk::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & uniformBufferStandardLayoutFeatures = + features2.get(); std::cout << "\tUniformBufferStandardLayoutFeatures:\n"; - std::cout << "\t\tuniformBufferStandardLayout : " << static_cast(uniformBufferStandardLayoutFeatures.uniformBufferStandardLayout) << "\n"; + std::cout << "\t\tuniformBufferStandardLayout : " + << static_cast( uniformBufferStandardLayoutFeatures.uniformBufferStandardLayout ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_variable_pointers")) + if ( vk::su::contains( extensionProperties, "VK_KHR_variable_pointers" ) ) { - vk::PhysicalDeviceVariablePointersFeatures const& variablePointersFeatures = features2.get(); + vk::PhysicalDeviceVariablePointersFeatures const & variablePointersFeatures = + features2.get(); std::cout << "\tVariablePointersFeatures:\n"; - std::cout << "\t\tvariablePointers : " << static_cast(variablePointersFeatures.variablePointers) << "\n"; - std::cout << "\t\tvariablePointersStorageBuffer : " << static_cast(variablePointersFeatures.variablePointersStorageBuffer) << "\n"; + std::cout << "\t\tvariablePointers : " + << static_cast( variablePointersFeatures.variablePointers ) << "\n"; + std::cout << "\t\tvariablePointersStorageBuffer : " + << static_cast( variablePointersFeatures.variablePointersStorageBuffer ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_vertex_attribute_divisor")) + if ( vk::su::contains( extensionProperties, "VK_EXT_vertex_attribute_divisor" ) ) { - vk::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& vertexAttributeDivisorFeatures = features2.get(); + vk::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & vertexAttributeDivisorFeatures = + features2.get(); std::cout << "\tVertexAttributeDivisorFeature:\n"; - std::cout << "\t\tvertexAttributeInstanceRateDivisor : " << static_cast(vertexAttributeDivisorFeatures.vertexAttributeInstanceRateDivisor) << "\n"; - std::cout << "\t\tvertexAttributeInstanceRateZeroDivisor : " << static_cast(vertexAttributeDivisorFeatures.vertexAttributeInstanceRateZeroDivisor) << "\n"; + std::cout << "\t\tvertexAttributeInstanceRateDivisor : " + << static_cast( vertexAttributeDivisorFeatures.vertexAttributeInstanceRateDivisor ) << "\n"; + std::cout << "\t\tvertexAttributeInstanceRateZeroDivisor : " + << static_cast( vertexAttributeDivisorFeatures.vertexAttributeInstanceRateZeroDivisor ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_vulkan_memory_model")) + if ( vk::su::contains( extensionProperties, "VK_KHR_vulkan_memory_model" ) ) { - vk::PhysicalDeviceVulkanMemoryModelFeaturesKHR const& vulkanMemoryModelFeatures = features2.get(); + vk::PhysicalDeviceVulkanMemoryModelFeaturesKHR const & vulkanMemoryModelFeatures = + features2.get(); std::cout << "\tVulkanMemoryModelFeatures:\n"; - std::cout << "\t\tvulkanMemoryModel : " << static_cast(vulkanMemoryModelFeatures.vulkanMemoryModel) << "\n"; - std::cout << "\t\tvulkanMemoryModelAvailabilityVisibilityChains : " << 
static_cast(vulkanMemoryModelFeatures.vulkanMemoryModelAvailabilityVisibilityChains) << "\n"; - std::cout << "\t\tvulkanMemoryModelDeviceScope : " << static_cast(vulkanMemoryModelFeatures.vulkanMemoryModelDeviceScope) << "\n"; + std::cout << "\t\tvulkanMemoryModel : " + << static_cast( vulkanMemoryModelFeatures.vulkanMemoryModel ) << "\n"; + std::cout << "\t\tvulkanMemoryModelAvailabilityVisibilityChains : " + << static_cast( vulkanMemoryModelFeatures.vulkanMemoryModelAvailabilityVisibilityChains ) + << "\n"; + std::cout << "\t\tvulkanMemoryModelDeviceScope : " + << static_cast( vulkanMemoryModelFeatures.vulkanMemoryModelDeviceScope ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_sampler_ycbcr_conversion")) + if ( vk::su::contains( extensionProperties, "VK_KHR_sampler_ycbcr_conversion" ) ) { - vk::PhysicalDeviceYcbcrImageArraysFeaturesEXT const& ycbcrImageArraysFeatures = features2.get(); + vk::PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ycbcrImageArraysFeatures = + features2.get(); std::cout << "\tYcbcrImageArraysFeatures:\n"; - std::cout << "\t\tycbcrImageArrays : " << static_cast(ycbcrImageArraysFeatures.ycbcrImageArrays) << "\n"; + std::cout << "\t\tycbcrImageArrays : " << static_cast( ycbcrImageArraysFeatures.ycbcrImageArrays ) + << "\n"; std::cout << "\n"; } } /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } - return 0; + return 0; } diff --git a/samples/PhysicalDeviceGroups/PhysicalDeviceGroups.cpp b/samples/PhysicalDeviceGroups/PhysicalDeviceGroups.cpp index 67730df..6779f67 100644 --- a/samples/PhysicalDeviceGroups/PhysicalDeviceGroups.cpp +++ b/samples/PhysicalDeviceGroups/PhysicalDeviceGroups.cpp @@ -17,18 +17,19 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "PhysicalDeviceGroups"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "PhysicalDeviceGroups"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif /* VULKAN_KEY_START */ @@ -36,19 +37,22 @@ int main(int /*argc*/, char ** /*argv*/) std::vector groupProperties = instance->enumeratePhysicalDeviceGroups(); std::cout << std::boolalpha; - for (size_t i=0 ; i< groupProperties.size() ; i++) + for ( size_t i = 0; i < groupProperties.size(); i++ ) { std::cout << "Group Properties " << i << "\n"; - std::cout << "\t" << "physicalDeviceCount = " << groupProperties[i].physicalDeviceCount << "\n"; - std::cout << "\t" << "physicalDevices:\n"; - for (size_t j = 0; j < groupProperties[i].physicalDeviceCount; j++) + std::cout << "\t" + << "physicalDeviceCount = " << groupProperties[i].physicalDeviceCount << "\n"; + std::cout << "\t" + << 
"physicalDevices:\n"; + for ( size_t j = 0; j < groupProperties[i].physicalDeviceCount; j++ ) { std::cout << "\t\t" << j << " : " << groupProperties[i].physicalDevices[j].getProperties().deviceName << "\n"; } - std::cout << "\t" << "subsetAllocation = " << static_cast(groupProperties[i].subsetAllocation) << "\n"; + std::cout << "\t" + << "subsetAllocation = " << static_cast( groupProperties[i].subsetAllocation ) << "\n"; std::cout << "\n"; - if (1 < groupProperties[i].physicalDeviceCount) + if ( 1 < groupProperties[i].physicalDeviceCount ) { vk::PhysicalDevice physicalDevice = groupProperties[i].physicalDevices[0]; @@ -56,40 +60,44 @@ int main(int /*argc*/, char ** /*argv*/) std::vector queueFamilyProperties = physicalDevice.getQueueFamilyProperties(); // get the first index into queueFamiliyProperties which supports graphics - size_t graphicsQueueFamilyIndex = std::distance(queueFamilyProperties.begin(), - std::find_if(queueFamilyProperties.begin(), - queueFamilyProperties.end(), - [](vk::QueueFamilyProperties const& qfp) { return qfp.queueFlags & vk::QueueFlagBits::eGraphics; })); - assert(graphicsQueueFamilyIndex < queueFamilyProperties.size()); + size_t graphicsQueueFamilyIndex = std::distance( + queueFamilyProperties.begin(), + std::find_if( + queueFamilyProperties.begin(), queueFamilyProperties.end(), []( vk::QueueFamilyProperties const & qfp ) { + return qfp.queueFlags & vk::QueueFlagBits::eGraphics; + } ) ); + assert( graphicsQueueFamilyIndex < queueFamilyProperties.size() ); // create a UniqueDevice - float queuePriority = 0.0f; - vk::DeviceQueueCreateInfo deviceQueueCreateInfo(vk::DeviceQueueCreateFlags(), static_cast(graphicsQueueFamilyIndex), 1, &queuePriority); - vk::DeviceCreateInfo deviceCreateInfo(vk::DeviceCreateFlags(), 1, &deviceQueueCreateInfo); + float queuePriority = 0.0f; + vk::DeviceQueueCreateInfo deviceQueueCreateInfo( + vk::DeviceQueueCreateFlags(), static_cast( graphicsQueueFamilyIndex ), 1, &queuePriority ); + vk::DeviceCreateInfo deviceCreateInfo( vk::DeviceCreateFlags(), 1, &deviceQueueCreateInfo ); - vk::DeviceGroupDeviceCreateInfo deviceGroupDeviceCreateInfo(groupProperties[i].physicalDeviceCount, groupProperties[i].physicalDevices); + vk::DeviceGroupDeviceCreateInfo deviceGroupDeviceCreateInfo( groupProperties[i].physicalDeviceCount, + groupProperties[i].physicalDevices ); deviceCreateInfo.pNext = &deviceGroupDeviceCreateInfo; - vk::UniqueDevice device = physicalDevice.createDeviceUnique(deviceCreateInfo); + vk::UniqueDevice device = physicalDevice.createDeviceUnique( deviceCreateInfo ); } } /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/PhysicalDeviceMemoryProperties/PhysicalDeviceMemoryProperties.cpp b/samples/PhysicalDeviceMemoryProperties/PhysicalDeviceMemoryProperties.cpp index b3fcf81..2d425ec 100644 --- a/samples/PhysicalDeviceMemoryProperties/PhysicalDeviceMemoryProperties.cpp +++ b/samples/PhysicalDeviceMemoryProperties/PhysicalDeviceMemoryProperties.cpp @@ -17,41 +17,42 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "PhysicalDeviceMemoryProperties"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "PhysicalDeviceMemoryProperties"; +static char const * EngineName = "Vulkan.hpp"; -std::string formatSize(vk::DeviceSize size) +std::string formatSize( vk::DeviceSize size ) { std::ostringstream oss; - if (size < 1024) + if ( size < 1024 ) { oss << size << " B"; } - else if (size < 1024 * 1024) + else if ( size < 1024 * 1024 ) { oss << size / 1024.f << " KB"; } - else if (size < 1024 * 1024 * 1024) + else if ( size < 1024 * 1024 * 1024 ) { - oss << size / (1024.0f * 1024.0f) << " MB"; + oss << size / ( 1024.0f * 1024.0f ) << " MB"; } else { - oss << size / (1024.0f * 1024.0f * 1024.0f) << " GB"; + oss << size / ( 1024.0f * 1024.0f * 1024.0f ) << " GB"; } return oss.str(); } -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif // enumerate the physicalDevices @@ -59,48 +60,56 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_KEY_START */ - for (size_t i=0 ; i extensionProperties = physicalDevices[i].enumerateDeviceExtensionProperties(); - bool containsMemoryBudget = vk::su::contains(extensionProperties, "VK_EXT_memory_budget"); + std::vector extensionProperties = + physicalDevices[i].enumerateDeviceExtensionProperties(); + bool containsMemoryBudget = vk::su::contains( extensionProperties, "VK_EXT_memory_budget" ); std::cout << "PhysicalDevice " << i << "\n"; - auto memoryProperties2 = physicalDevices[i].getMemoryProperties2(); - vk::PhysicalDeviceMemoryProperties const& memoryProperties = memoryProperties2.get().memoryProperties; - vk::PhysicalDeviceMemoryBudgetPropertiesEXT const& memoryBudgetProperties = memoryProperties2.get(); + auto memoryProperties2 = + physicalDevices[i] + .getMemoryProperties2(); + vk::PhysicalDeviceMemoryProperties const & memoryProperties = + memoryProperties2.get().memoryProperties; + vk::PhysicalDeviceMemoryBudgetPropertiesEXT const & memoryBudgetProperties = + memoryProperties2.get(); std::cout << "memoryHeapCount: " << memoryProperties.memoryHeapCount << "\n"; - for (uint32_t j = 0; j < memoryProperties.memoryHeapCount; j++) + for ( uint32_t j = 0; j < memoryProperties.memoryHeapCount; j++ ) { - std::cout << " " << j << ": size = " << formatSize(memoryProperties.memoryHeaps[j].size) << ", flags = " << vk::to_string(memoryProperties.memoryHeaps[j].flags) << "\n"; - if (containsMemoryBudget) + std::cout << " " << j << ": size = " << formatSize( memoryProperties.memoryHeaps[j].size ) + << ", flags = " << vk::to_string( 
memoryProperties.memoryHeaps[j].flags ) << "\n"; + if ( containsMemoryBudget ) { - std::cout << " heapBudget = " << formatSize(memoryBudgetProperties.heapBudget[j]) << ", heapUsage = " << formatSize(memoryBudgetProperties.heapUsage[j]) << "\n"; + std::cout << " heapBudget = " << formatSize( memoryBudgetProperties.heapBudget[j] ) + << ", heapUsage = " << formatSize( memoryBudgetProperties.heapUsage[j] ) << "\n"; } } std::cout << "memoryTypeCount: " << memoryProperties.memoryTypeCount << "\n"; - for (uint32_t j = 0; j < memoryProperties.memoryTypeCount; j++) + for ( uint32_t j = 0; j < memoryProperties.memoryTypeCount; j++ ) { - std::cout << " " << j << ": heapIndex = " << memoryProperties.memoryTypes[j].heapIndex << ", flags = " << vk::to_string(memoryProperties.memoryTypes[j].propertyFlags) << "\n"; + std::cout << " " << j << ": heapIndex = " << memoryProperties.memoryTypes[j].heapIndex + << ", flags = " << vk::to_string( memoryProperties.memoryTypes[j].propertyFlags ) << "\n"; } } /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/PhysicalDeviceProperties/PhysicalDeviceProperties.cpp b/samples/PhysicalDeviceProperties/PhysicalDeviceProperties.cpp index 37da09e..5a38130 100644 --- a/samples/PhysicalDeviceProperties/PhysicalDeviceProperties.cpp +++ b/samples/PhysicalDeviceProperties/PhysicalDeviceProperties.cpp @@ -17,52 +17,51 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include #include #include -static char const* AppName = "PhysicalDeviceProperties"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "PhysicalDeviceProperties"; +static char const * EngineName = "Vulkan.hpp"; -std::string decodeAPIVersion(uint32_t apiVersion) +std::string decodeAPIVersion( uint32_t apiVersion ) { - return std::to_string(VK_VERSION_MAJOR(apiVersion)) + "." + std::to_string(VK_VERSION_MINOR(apiVersion)) + "." + std::to_string(VK_VERSION_PATCH(apiVersion)); + return std::to_string( VK_VERSION_MAJOR( apiVersion ) ) + "." + std::to_string( VK_VERSION_MINOR( apiVersion ) ) + + "." + std::to_string( VK_VERSION_PATCH( apiVersion ) ); } -std::string decodeDriverVersion(uint32_t driverVersion, uint32_t vendorID) +std::string decodeDriverVersion( uint32_t driverVersion, uint32_t vendorID ) { - switch (vendorID) + switch ( vendorID ) { case 4318: - return std::to_string((driverVersion >> 22) & 0x3FF) + "." + std::to_string((driverVersion >> 14) & 0xFF) + "." + std::to_string((driverVersion >> 6) & 0xFF) + "." + std::to_string(driverVersion & 0x3F); + return std::to_string( ( driverVersion >> 22 ) & 0x3FF ) + "." + + std::to_string( ( driverVersion >> 14 ) & 0xFF ) + "." + std::to_string( ( driverVersion >> 6 ) & 0xFF ) + + "." + std::to_string( driverVersion & 0x3F ); case 0x8086: - return std::to_string((driverVersion >> 14) & 0x3FFFF) + "." + std::to_string((driverVersion & 0x3FFF)); - default: - return decodeAPIVersion(driverVersion); + return std::to_string( ( driverVersion >> 14 ) & 0x3FFFF ) + "." 
+ std::to_string( ( driverVersion & 0x3FFF ) ); + default: return decodeAPIVersion( driverVersion ); } } -std::string decodeVendorID(uint32_t vendorID) +std::string decodeVendorID( uint32_t vendorID ) { // below 0x10000 are the PCI vendor IDs (https://pcisig.com/membership/member-companies) - if (vendorID < 0x10000) + if ( vendorID < 0x10000 ) { - switch (vendorID) + switch ( vendorID ) { - case 0x1022: - return "Advanced Micro Devices"; - case 0x10DE: - return "NVidia Corporation"; - case 0x8086: - return "Intel Corporation"; - default: - return std::to_string(vendorID); + case 0x1022: return "Advanced Micro Devices"; + case 0x10DE: return "NVidia Corporation"; + case 0x8086: return "Intel Corporation"; + default: return std::to_string( vendorID ); } } else { // above 0x10000 should be vkVendorIDs - return vk::to_string(vk::VendorId(vendorID)); + return vk::to_string( vk::VendorId( vendorID ) ); } } @@ -73,38 +72,38 @@ namespace vk struct LUID { public: - LUID(uint8_t const data[VK_LUID_SIZE]) + LUID( uint8_t const data[VK_LUID_SIZE] ) { - memcpy(m_data, data, VK_LUID_SIZE * sizeof(uint8_t)); + memcpy( m_data, data, VK_LUID_SIZE * sizeof( uint8_t ) ); } uint8_t m_data[VK_LUID_SIZE]; }; - std::ostream& operator<<(std::ostream& os, LUID const& uuid) + std::ostream & operator<<( std::ostream & os, LUID const & uuid ) { - os << std::setfill('0') << std::hex; - for (int j = 0; j < VK_LUID_SIZE; ++j) + os << std::setfill( '0' ) << std::hex; + for ( int j = 0; j < VK_LUID_SIZE; ++j ) { - os << std::setw(2) << static_cast(uuid.m_data[j]); - if (j == 3 || j == 5) + os << std::setw( 2 ) << static_cast( uuid.m_data[j] ); + if ( j == 3 || j == 5 ) { std::cout << '-'; } } - os << std::setfill(' ') << std::dec; + os << std::setfill( ' ' ) << std::dec; return os; } - } -} + } // namespace su +} // namespace vk -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif // enumerate the physicalDevices @@ -113,557 +112,1176 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_KEY_START */ std::cout << std::boolalpha; - for (size_t i=0 ; i extensionProperties = physicalDevices[i].enumerateDeviceExtensionProperties(); + std::vector extensionProperties = + physicalDevices[i].enumerateDeviceExtensionProperties(); std::cout << "PhysicalDevice " << i << "\n"; - auto properties2 = physicalDevices[i].getProperties2(); - vk::PhysicalDeviceProperties const& properties = properties2.get().properties; - std::cout << "\t" << "Properties:\n"; - std::cout << "\t\t" << "apiVersion = " << decodeAPIVersion(properties.apiVersion) << "\n"; - std::cout << "\t\t" << "driverVersion = " << decodeDriverVersion(properties.driverVersion, properties.vendorID) << "\n"; - std::cout << "\t\t" << "vendorID = " << decodeVendorID(properties.vendorID) << "\n"; - std::cout << "\t\t" << "deviceID = " << properties.deviceID << "\n"; - std::cout << "\t\t" << "deviceType = " << vk::to_string(properties.deviceType) << "\n"; - std::cout << "\t\t" << "deviceName = " << properties.deviceName << "\n"; - std::cout << "\t\t" << "pipelineCacheUUID = " << vk::su::UUID(properties.pipelineCacheUUID) << "\n"; - 
std::cout << "\t\t" << "limits:\n"; - std::cout << "\t\t\t" << "bufferImageGranularity = " << properties.limits.bufferImageGranularity << "\n"; - std::cout << "\t\t\t" << "discreteQueuePriorities = " << properties.limits.discreteQueuePriorities << "\n"; - std::cout << "\t\t\t" << "framebufferColorSampleCounts = " << vk::to_string(properties.limits.framebufferColorSampleCounts) << "\n"; - std::cout << "\t\t\t" << "framebufferDepthSampleCounts = " << vk::to_string(properties.limits.framebufferDepthSampleCounts) << "\n"; - std::cout << "\t\t\t" << "framebufferNoAttachmentsSampleCounts = " << vk::to_string(properties.limits.framebufferNoAttachmentsSampleCounts) << "\n"; - std::cout << "\t\t\t" << "framebufferStencilSampleCounts = " << vk::to_string(properties.limits.framebufferStencilSampleCounts) << "\n"; - std::cout << "\t\t\t" << "lineWidthGranularity = " << properties.limits.lineWidthGranularity << "\n"; - std::cout << "\t\t\t" << "lineWidthRange = " << "[" << properties.limits.lineWidthRange[0] << ", " << properties.limits.lineWidthRange[1] << "]" << "\n"; - std::cout << "\t\t\t" << "maxBoundDescriptorSets = " << properties.limits.maxBoundDescriptorSets << "\n"; - std::cout << "\t\t\t" << "maxClipDistances = " << properties.limits.maxClipDistances << "\n"; - std::cout << "\t\t\t" << "maxColorAttachments = " << properties.limits.maxColorAttachments << "\n"; - std::cout << "\t\t\t" << "maxCombinedClipAndCullDistances = " << properties.limits.maxCombinedClipAndCullDistances << "\n"; - std::cout << "\t\t\t" << "maxComputeSharedMemorySize = " << properties.limits.maxComputeSharedMemorySize << "\n"; - std::cout << "\t\t\t" << "maxComputeWorkGroupCount = " << "[" << properties.limits.maxComputeWorkGroupCount[0] << ", " << properties.limits.maxComputeWorkGroupCount[1] << ", " << properties.limits.maxComputeWorkGroupCount[2] << "]" << "\n"; - std::cout << "\t\t\t" << "maxComputeWorkGroupInvocations = " << properties.limits.maxComputeWorkGroupInvocations << "\n"; - std::cout << "\t\t\t" << "maxComputeWorkGroupSize = " << "[" << properties.limits.maxComputeWorkGroupSize[0] << ", " << properties.limits.maxComputeWorkGroupSize[1] << ", " << properties.limits.maxComputeWorkGroupSize[2] << "]" << "\n"; - std::cout << "\t\t\t" << "maxCullDistances = " << properties.limits.maxCullDistances << "\n"; - std::cout << "\t\t\t" << "maxDescriptorSetInputAttachments = " << properties.limits.maxDescriptorSetInputAttachments << "\n"; - std::cout << "\t\t\t" << "maxDescriptorSetSampledImages = " << properties.limits.maxDescriptorSetSampledImages << "\n"; - std::cout << "\t\t\t" << "maxDescriptorSetSamplers = " << properties.limits.maxDescriptorSetSamplers << "\n"; - std::cout << "\t\t\t" << "maxDescriptorSetStorageBuffers = " << properties.limits.maxDescriptorSetStorageBuffers << "\n"; - std::cout << "\t\t\t" << "maxDescriptorSetStorageBuffersDynamic = " << properties.limits.maxDescriptorSetStorageBuffersDynamic << "\n"; - std::cout << "\t\t\t" << "maxDescriptorSetStorageImages = " << properties.limits.maxDescriptorSetStorageImages << "\n"; - std::cout << "\t\t\t" << "maxDescriptorSetUniformBuffers = " << properties.limits.maxDescriptorSetUniformBuffers << "\n"; - std::cout << "\t\t\t" << "maxDescriptorSetUniformBuffersDynamic = " << properties.limits.maxDescriptorSetUniformBuffersDynamic << "\n"; - std::cout << "\t\t\t" << "maxDrawIndexedIndexValue = " << properties.limits.maxDrawIndexedIndexValue << "\n"; - std::cout << "\t\t\t" << "maxDrawIndirectCount = " << properties.limits.maxDrawIndirectCount << "\n"; - 
std::cout << "\t\t\t" << "maxFragmentCombinedOutputResources = " << properties.limits.maxFragmentCombinedOutputResources << "\n"; - std::cout << "\t\t\t" << "maxFragmentDualSrcAttachments = " << properties.limits.maxFragmentDualSrcAttachments << "\n"; - std::cout << "\t\t\t" << "maxFragmentInputComponents = " << properties.limits.maxFragmentInputComponents << "\n"; - std::cout << "\t\t\t" << "maxFragmentOutputAttachments = " << properties.limits.maxFragmentOutputAttachments << "\n"; - std::cout << "\t\t\t" << "maxFramebufferHeight = " << properties.limits.maxFramebufferHeight << "\n"; - std::cout << "\t\t\t" << "maxFramebufferLayers = " << properties.limits.maxFramebufferLayers << "\n"; - std::cout << "\t\t\t" << "maxFramebufferWidth = " << properties.limits.maxFramebufferWidth << "\n"; - std::cout << "\t\t\t" << "maxGeometryInputComponents = " << properties.limits.maxGeometryInputComponents << "\n"; - std::cout << "\t\t\t" << "maxGeometryOutputComponents = " << properties.limits.maxGeometryOutputComponents << "\n"; - std::cout << "\t\t\t" << "maxGeometryOutputVertices = " << properties.limits.maxGeometryOutputVertices << "\n"; - std::cout << "\t\t\t" << "maxGeometryShaderInvocations = " << properties.limits.maxGeometryShaderInvocations << "\n"; - std::cout << "\t\t\t" << "maxGeometryTotalOutputComponents = " << properties.limits.maxGeometryTotalOutputComponents << "\n"; - std::cout << "\t\t\t" << "maxImageArrayLayers = " << properties.limits.maxImageArrayLayers << "\n"; - std::cout << "\t\t\t" << "maxImageDimension1D = " << properties.limits.maxImageDimension1D << "\n"; - std::cout << "\t\t\t" << "maxImageDimension2D = " << properties.limits.maxImageDimension2D << "\n"; - std::cout << "\t\t\t" << "maxImageDimension3D = " << properties.limits.maxImageDimension3D << "\n"; - std::cout << "\t\t\t" << "maxImageDimensionCube = " << properties.limits.maxImageDimensionCube << "\n"; - std::cout << "\t\t\t" << "maxInterpolationOffset = " << properties.limits.maxInterpolationOffset << "\n"; - std::cout << "\t\t\t" << "maxMemoryAllocationCount = " << properties.limits.maxMemoryAllocationCount << "\n"; - std::cout << "\t\t\t" << "maxPerStageDescriptorInputAttachments = " << properties.limits.maxPerStageDescriptorInputAttachments << "\n"; - std::cout << "\t\t\t" << "maxPerStageDescriptorSampledImages = " << properties.limits.maxPerStageDescriptorSampledImages << "\n"; - std::cout << "\t\t\t" << "maxPerStageDescriptorSamplers = " << properties.limits.maxPerStageDescriptorSamplers << "\n"; - std::cout << "\t\t\t" << "maxPerStageDescriptorStorageBuffers = " << properties.limits.maxPerStageDescriptorStorageBuffers << "\n"; - std::cout << "\t\t\t" << "maxPerStageDescriptorStorageImages = " << properties.limits.maxPerStageDescriptorStorageImages << "\n"; - std::cout << "\t\t\t" << "maxPerStageDescriptorUniformBuffers = " << properties.limits.maxPerStageDescriptorUniformBuffers << "\n"; - std::cout << "\t\t\t" << "maxPerStageResources = " << properties.limits.maxPerStageResources << "\n"; - std::cout << "\t\t\t" << "maxPushConstantsSize = " << properties.limits.maxPushConstantsSize << "\n"; - std::cout << "\t\t\t" << "maxSampleMaskWords = " << properties.limits.maxSampleMaskWords << "\n"; - std::cout << "\t\t\t" << "maxSamplerAllocationCount = " << properties.limits.maxSamplerAllocationCount << "\n"; - std::cout << "\t\t\t" << "maxSamplerAnisotropy = " << properties.limits.maxSamplerAnisotropy << "\n"; - std::cout << "\t\t\t" << "maxSamplerLodBias = " << properties.limits.maxSamplerLodBias << "\n"; - 
std::cout << "\t\t\t" << "maxStorageBufferRange = " << properties.limits.maxStorageBufferRange << "\n"; - std::cout << "\t\t\t" << "maxTessellationControlPerPatchOutputComponents = " << properties.limits.maxTessellationControlPerPatchOutputComponents << "\n"; - std::cout << "\t\t\t" << "maxTessellationControlPerVertexInputComponents = " << properties.limits.maxTessellationControlPerVertexInputComponents << "\n"; - std::cout << "\t\t\t" << "maxTessellationControlPerVertexOutputComponents = " << properties.limits.maxTessellationControlPerVertexOutputComponents << "\n"; - std::cout << "\t\t\t" << "maxTessellationControlTotalOutputComponents = " << properties.limits.maxTessellationControlTotalOutputComponents << "\n"; - std::cout << "\t\t\t" << "maxTessellationEvaluationInputComponents = " << properties.limits.maxTessellationEvaluationInputComponents << "\n"; - std::cout << "\t\t\t" << "maxTessellationEvaluationOutputComponents = " << properties.limits.maxTessellationEvaluationOutputComponents << "\n"; - std::cout << "\t\t\t" << "maxTessellationGenerationLevel = " << properties.limits.maxTessellationGenerationLevel << "\n"; - std::cout << "\t\t\t" << "maxTessellationPatchSize = " << properties.limits.maxTessellationPatchSize << "\n"; - std::cout << "\t\t\t" << "maxTexelBufferElements = " << properties.limits.maxTexelBufferElements << "\n"; - std::cout << "\t\t\t" << "maxTexelGatherOffset = " << properties.limits.maxTexelGatherOffset << "\n"; - std::cout << "\t\t\t" << "maxTexelOffset = " << properties.limits.maxTexelOffset << "\n"; - std::cout << "\t\t\t" << "maxUniformBufferRange = " << properties.limits.maxUniformBufferRange << "\n"; - std::cout << "\t\t\t" << "maxVertexInputAttributeOffset = " << properties.limits.maxVertexInputAttributeOffset << "\n"; - std::cout << "\t\t\t" << "maxVertexInputAttributes = " << properties.limits.maxVertexInputAttributes << "\n"; - std::cout << "\t\t\t" << "maxVertexInputBindings = " << properties.limits.maxVertexInputBindings << "\n"; - std::cout << "\t\t\t" << "maxVertexInputBindingStride = " << properties.limits.maxVertexInputBindingStride << "\n"; - std::cout << "\t\t\t" << "maxVertexOutputComponents = " << properties.limits.maxVertexOutputComponents << "\n"; - std::cout << "\t\t\t" << "maxViewportDimensions = " << "[" << properties.limits.maxViewportDimensions[0] << ", " << properties.limits.maxViewportDimensions[1] << "]" << "\n"; - std::cout << "\t\t\t" << "maxViewports = " << properties.limits.maxViewports << "\n"; - std::cout << "\t\t\t" << "minInterpolationOffset = " << properties.limits.minInterpolationOffset << "\n"; - std::cout << "\t\t\t" << "minMemoryMapAlignment = " << properties.limits.minMemoryMapAlignment << "\n"; - std::cout << "\t\t\t" << "minStorageBufferOffsetAlignment = " << properties.limits.minStorageBufferOffsetAlignment << "\n"; - std::cout << "\t\t\t" << "minTexelBufferOffsetAlignment = " << properties.limits.minTexelBufferOffsetAlignment << "\n"; - std::cout << "\t\t\t" << "minTexelGatherOffset = " << properties.limits.minTexelGatherOffset << "\n"; - std::cout << "\t\t\t" << "minTexelOffset = " << properties.limits.minTexelOffset << "\n"; - std::cout << "\t\t\t" << "minUniformBufferOffsetAlignment = " << properties.limits.minUniformBufferOffsetAlignment << "\n"; - std::cout << "\t\t\t" << "mipmapPrecisionBits = " << properties.limits.mipmapPrecisionBits << "\n"; - std::cout << "\t\t\t" << "nonCoherentAtomSize = " << properties.limits.nonCoherentAtomSize << "\n"; - std::cout << "\t\t\t" << "optimalBufferCopyOffsetAlignment = " << 
properties.limits.optimalBufferCopyOffsetAlignment << "\n"; - std::cout << "\t\t\t" << "optimalBufferCopyRowPitchAlignment = " << properties.limits.optimalBufferCopyRowPitchAlignment << "\n"; - std::cout << "\t\t\t" << "pointSizeGranularity = " << properties.limits.pointSizeGranularity << "\n"; - std::cout << "\t\t\t" << "pointSizeRange = " << "[" << properties.limits.pointSizeRange[0] << ", " << properties.limits.pointSizeRange[1] << "]" << "\n"; - std::cout << "\t\t\t" << "sampledImageColorSampleCounts = " << vk::to_string(properties.limits.sampledImageColorSampleCounts) << "\n"; - std::cout << "\t\t\t" << "sampledImageDepthSampleCounts = " << vk::to_string(properties.limits.sampledImageDepthSampleCounts) << "\n"; - std::cout << "\t\t\t" << "sampledImageIntegerSampleCounts = " << vk::to_string(properties.limits.sampledImageIntegerSampleCounts) << "\n"; - std::cout << "\t\t\t" << "sampledImageStencilSampleCounts = " << vk::to_string(properties.limits.sampledImageStencilSampleCounts) << "\n"; - std::cout << "\t\t\t" << "sparseAddressSpaceSize = " << properties.limits.sparseAddressSpaceSize << "\n"; - std::cout << "\t\t\t" << "standardSampleLocations = " << static_cast(properties.limits.standardSampleLocations) << "\n"; - std::cout << "\t\t\t" << "storageImageSampleCounts = " << vk::to_string(properties.limits.storageImageSampleCounts) << "\n"; - std::cout << "\t\t\t" << "strictLines = " << static_cast(properties.limits.strictLines) << "\n"; - std::cout << "\t\t\t" << "subPixelInterpolationOffsetBits = " << properties.limits.subPixelInterpolationOffsetBits << "\n"; - std::cout << "\t\t\t" << "subPixelPrecisionBits = " << properties.limits.subPixelPrecisionBits << "\n"; - std::cout << "\t\t\t" << "subTexelPrecisionBits = " << properties.limits.subTexelPrecisionBits << "\n"; - std::cout << "\t\t\t" << "timestampComputeAndGraphics = " << static_cast(properties.limits.timestampComputeAndGraphics) << "\n"; - std::cout << "\t\t\t" << "timestampPeriod = " << properties.limits.timestampPeriod << "\n"; - std::cout << "\t\t\t" << "viewportBoundsRange = " << "[" << properties.limits.viewportBoundsRange[0] << ", " << properties.limits.viewportBoundsRange[1] << "]" << "\n"; - std::cout << "\t\t\t" << "viewportSubPixelBits = " << properties.limits.viewportSubPixelBits << "\n"; - std::cout << "\t\t" << "sparseProperties:\n"; - std::cout << "\t\t\t" << "residencyAlignedMipSize = " << static_cast(properties.sparseProperties.residencyAlignedMipSize) << "\n"; - std::cout << "\t\t\t" << "residencyNonResidentStrict = " << static_cast(properties.sparseProperties.residencyNonResidentStrict) << "\n"; - std::cout << "\t\t\t" << "residencyStandard2DBlockShape = " << static_cast(properties.sparseProperties.residencyStandard2DBlockShape) << "\n"; - std::cout << "\t\t\t" << "residencyStandard2DMultisampleBlockShape = " << static_cast(properties.sparseProperties.residencyStandard2DMultisampleBlockShape) << "\n"; - std::cout << "\t\t\t" << "residencyStandard3DBlockShape = " << static_cast(properties.sparseProperties.residencyStandard3DBlockShape) << "\n"; + auto properties2 = physicalDevices[i] + .getProperties2(); + vk::PhysicalDeviceProperties const & properties = properties2.get().properties; + std::cout << "\t" + << "Properties:\n"; + std::cout << "\t\t" + << "apiVersion = " << decodeAPIVersion( properties.apiVersion ) << "\n"; + std::cout << "\t\t" + << "driverVersion = " << decodeDriverVersion( properties.driverVersion, properties.vendorID ) + << "\n"; + std::cout << "\t\t" + << "vendorID = " << decodeVendorID( 
properties.vendorID ) << "\n"; + std::cout << "\t\t" + << "deviceID = " << properties.deviceID << "\n"; + std::cout << "\t\t" + << "deviceType = " << vk::to_string( properties.deviceType ) << "\n"; + std::cout << "\t\t" + << "deviceName = " << properties.deviceName << "\n"; + std::cout << "\t\t" + << "pipelineCacheUUID = " << vk::su::UUID( properties.pipelineCacheUUID ) << "\n"; + std::cout << "\t\t" + << "limits:\n"; + std::cout << "\t\t\t" + << "bufferImageGranularity = " << properties.limits.bufferImageGranularity + << "\n"; + std::cout << "\t\t\t" + << "discreteQueuePriorities = " << properties.limits.discreteQueuePriorities + << "\n"; + std::cout << "\t\t\t" + << "framebufferColorSampleCounts = " + << vk::to_string( properties.limits.framebufferColorSampleCounts ) << "\n"; + std::cout << "\t\t\t" + << "framebufferDepthSampleCounts = " + << vk::to_string( properties.limits.framebufferDepthSampleCounts ) << "\n"; + std::cout << "\t\t\t" + << "framebufferNoAttachmentsSampleCounts = " + << vk::to_string( properties.limits.framebufferNoAttachmentsSampleCounts ) << "\n"; + std::cout << "\t\t\t" + << "framebufferStencilSampleCounts = " + << vk::to_string( properties.limits.framebufferStencilSampleCounts ) << "\n"; + std::cout << "\t\t\t" + << "lineWidthGranularity = " << properties.limits.lineWidthGranularity + << "\n"; + std::cout << "\t\t\t" + << "lineWidthRange = " + << "[" << properties.limits.lineWidthRange[0] << ", " << properties.limits.lineWidthRange[1] << "]" + << "\n"; + std::cout << "\t\t\t" + << "maxBoundDescriptorSets = " << properties.limits.maxBoundDescriptorSets + << "\n"; + std::cout << "\t\t\t" + << "maxClipDistances = " << properties.limits.maxClipDistances << "\n"; + std::cout << "\t\t\t" + << "maxColorAttachments = " << properties.limits.maxColorAttachments + << "\n"; + std::cout << "\t\t\t" + << "maxCombinedClipAndCullDistances = " + << properties.limits.maxCombinedClipAndCullDistances << "\n"; + std::cout << "\t\t\t" + << "maxComputeSharedMemorySize = " << properties.limits.maxComputeSharedMemorySize + << "\n"; + std::cout << "\t\t\t" + << "maxComputeWorkGroupCount = " + << "[" << properties.limits.maxComputeWorkGroupCount[0] << ", " + << properties.limits.maxComputeWorkGroupCount[1] << ", " + << properties.limits.maxComputeWorkGroupCount[2] << "]" + << "\n"; + std::cout << "\t\t\t" + << "maxComputeWorkGroupInvocations = " + << properties.limits.maxComputeWorkGroupInvocations << "\n"; + std::cout << "\t\t\t" + << "maxComputeWorkGroupSize = " + << "[" << properties.limits.maxComputeWorkGroupSize[0] << ", " + << properties.limits.maxComputeWorkGroupSize[1] << ", " << properties.limits.maxComputeWorkGroupSize[2] + << "]" + << "\n"; + std::cout << "\t\t\t" + << "maxCullDistances = " << properties.limits.maxCullDistances << "\n"; + std::cout << "\t\t\t" + << "maxDescriptorSetInputAttachments = " + << properties.limits.maxDescriptorSetInputAttachments << "\n"; + std::cout << "\t\t\t" + << "maxDescriptorSetSampledImages = " + << properties.limits.maxDescriptorSetSampledImages << "\n"; + std::cout << "\t\t\t" + << "maxDescriptorSetSamplers = " << properties.limits.maxDescriptorSetSamplers + << "\n"; + std::cout << "\t\t\t" + << "maxDescriptorSetStorageBuffers = " + << properties.limits.maxDescriptorSetStorageBuffers << "\n"; + std::cout << "\t\t\t" + << "maxDescriptorSetStorageBuffersDynamic = " + << properties.limits.maxDescriptorSetStorageBuffersDynamic << "\n"; + std::cout << "\t\t\t" + << "maxDescriptorSetStorageImages = " + << 
properties.limits.maxDescriptorSetStorageImages << "\n"; + std::cout << "\t\t\t" + << "maxDescriptorSetUniformBuffers = " + << properties.limits.maxDescriptorSetUniformBuffers << "\n"; + std::cout << "\t\t\t" + << "maxDescriptorSetUniformBuffersDynamic = " + << properties.limits.maxDescriptorSetUniformBuffersDynamic << "\n"; + std::cout << "\t\t\t" + << "maxDrawIndexedIndexValue = " << properties.limits.maxDrawIndexedIndexValue + << "\n"; + std::cout << "\t\t\t" + << "maxDrawIndirectCount = " << properties.limits.maxDrawIndirectCount + << "\n"; + std::cout << "\t\t\t" + << "maxFragmentCombinedOutputResources = " + << properties.limits.maxFragmentCombinedOutputResources << "\n"; + std::cout << "\t\t\t" + << "maxFragmentDualSrcAttachments = " + << properties.limits.maxFragmentDualSrcAttachments << "\n"; + std::cout << "\t\t\t" + << "maxFragmentInputComponents = " << properties.limits.maxFragmentInputComponents + << "\n"; + std::cout << "\t\t\t" + << "maxFragmentOutputAttachments = " + << properties.limits.maxFragmentOutputAttachments << "\n"; + std::cout << "\t\t\t" + << "maxFramebufferHeight = " << properties.limits.maxFramebufferHeight + << "\n"; + std::cout << "\t\t\t" + << "maxFramebufferLayers = " << properties.limits.maxFramebufferLayers + << "\n"; + std::cout << "\t\t\t" + << "maxFramebufferWidth = " << properties.limits.maxFramebufferWidth + << "\n"; + std::cout << "\t\t\t" + << "maxGeometryInputComponents = " << properties.limits.maxGeometryInputComponents + << "\n"; + std::cout << "\t\t\t" + << "maxGeometryOutputComponents = " << properties.limits.maxGeometryOutputComponents + << "\n"; + std::cout << "\t\t\t" + << "maxGeometryOutputVertices = " << properties.limits.maxGeometryOutputVertices + << "\n"; + std::cout << "\t\t\t" + << "maxGeometryShaderInvocations = " + << properties.limits.maxGeometryShaderInvocations << "\n"; + std::cout << "\t\t\t" + << "maxGeometryTotalOutputComponents = " + << properties.limits.maxGeometryTotalOutputComponents << "\n"; + std::cout << "\t\t\t" + << "maxImageArrayLayers = " << properties.limits.maxImageArrayLayers + << "\n"; + std::cout << "\t\t\t" + << "maxImageDimension1D = " << properties.limits.maxImageDimension1D + << "\n"; + std::cout << "\t\t\t" + << "maxImageDimension2D = " << properties.limits.maxImageDimension2D + << "\n"; + std::cout << "\t\t\t" + << "maxImageDimension3D = " << properties.limits.maxImageDimension3D + << "\n"; + std::cout << "\t\t\t" + << "maxImageDimensionCube = " << properties.limits.maxImageDimensionCube + << "\n"; + std::cout << "\t\t\t" + << "maxInterpolationOffset = " << properties.limits.maxInterpolationOffset + << "\n"; + std::cout << "\t\t\t" + << "maxMemoryAllocationCount = " << properties.limits.maxMemoryAllocationCount + << "\n"; + std::cout << "\t\t\t" + << "maxPerStageDescriptorInputAttachments = " + << properties.limits.maxPerStageDescriptorInputAttachments << "\n"; + std::cout << "\t\t\t" + << "maxPerStageDescriptorSampledImages = " + << properties.limits.maxPerStageDescriptorSampledImages << "\n"; + std::cout << "\t\t\t" + << "maxPerStageDescriptorSamplers = " + << properties.limits.maxPerStageDescriptorSamplers << "\n"; + std::cout << "\t\t\t" + << "maxPerStageDescriptorStorageBuffers = " + << properties.limits.maxPerStageDescriptorStorageBuffers << "\n"; + std::cout << "\t\t\t" + << "maxPerStageDescriptorStorageImages = " + << properties.limits.maxPerStageDescriptorStorageImages << "\n"; + std::cout << "\t\t\t" + << "maxPerStageDescriptorUniformBuffers = " + << 
properties.limits.maxPerStageDescriptorUniformBuffers << "\n"; + std::cout << "\t\t\t" + << "maxPerStageResources = " << properties.limits.maxPerStageResources + << "\n"; + std::cout << "\t\t\t" + << "maxPushConstantsSize = " << properties.limits.maxPushConstantsSize + << "\n"; + std::cout << "\t\t\t" + << "maxSampleMaskWords = " << properties.limits.maxSampleMaskWords << "\n"; + std::cout << "\t\t\t" + << "maxSamplerAllocationCount = " << properties.limits.maxSamplerAllocationCount + << "\n"; + std::cout << "\t\t\t" + << "maxSamplerAnisotropy = " << properties.limits.maxSamplerAnisotropy + << "\n"; + std::cout << "\t\t\t" + << "maxSamplerLodBias = " << properties.limits.maxSamplerLodBias << "\n"; + std::cout << "\t\t\t" + << "maxStorageBufferRange = " << properties.limits.maxStorageBufferRange + << "\n"; + std::cout << "\t\t\t" + << "maxTessellationControlPerPatchOutputComponents = " + << properties.limits.maxTessellationControlPerPatchOutputComponents << "\n"; + std::cout << "\t\t\t" + << "maxTessellationControlPerVertexInputComponents = " + << properties.limits.maxTessellationControlPerVertexInputComponents << "\n"; + std::cout << "\t\t\t" + << "maxTessellationControlPerVertexOutputComponents = " + << properties.limits.maxTessellationControlPerVertexOutputComponents << "\n"; + std::cout << "\t\t\t" + << "maxTessellationControlTotalOutputComponents = " + << properties.limits.maxTessellationControlTotalOutputComponents << "\n"; + std::cout << "\t\t\t" + << "maxTessellationEvaluationInputComponents = " + << properties.limits.maxTessellationEvaluationInputComponents << "\n"; + std::cout << "\t\t\t" + << "maxTessellationEvaluationOutputComponents = " + << properties.limits.maxTessellationEvaluationOutputComponents << "\n"; + std::cout << "\t\t\t" + << "maxTessellationGenerationLevel = " + << properties.limits.maxTessellationGenerationLevel << "\n"; + std::cout << "\t\t\t" + << "maxTessellationPatchSize = " << properties.limits.maxTessellationPatchSize + << "\n"; + std::cout << "\t\t\t" + << "maxTexelBufferElements = " << properties.limits.maxTexelBufferElements + << "\n"; + std::cout << "\t\t\t" + << "maxTexelGatherOffset = " << properties.limits.maxTexelGatherOffset + << "\n"; + std::cout << "\t\t\t" + << "maxTexelOffset = " << properties.limits.maxTexelOffset << "\n"; + std::cout << "\t\t\t" + << "maxUniformBufferRange = " << properties.limits.maxUniformBufferRange + << "\n"; + std::cout << "\t\t\t" + << "maxVertexInputAttributeOffset = " + << properties.limits.maxVertexInputAttributeOffset << "\n"; + std::cout << "\t\t\t" + << "maxVertexInputAttributes = " << properties.limits.maxVertexInputAttributes + << "\n"; + std::cout << "\t\t\t" + << "maxVertexInputBindings = " << properties.limits.maxVertexInputBindings + << "\n"; + std::cout << "\t\t\t" + << "maxVertexInputBindingStride = " << properties.limits.maxVertexInputBindingStride + << "\n"; + std::cout << "\t\t\t" + << "maxVertexOutputComponents = " << properties.limits.maxVertexOutputComponents + << "\n"; + std::cout << "\t\t\t" + << "maxViewportDimensions = " + << "[" << properties.limits.maxViewportDimensions[0] << ", " + << properties.limits.maxViewportDimensions[1] << "]" + << "\n"; + std::cout << "\t\t\t" + << "maxViewports = " << properties.limits.maxViewports << "\n"; + std::cout << "\t\t\t" + << "minInterpolationOffset = " << properties.limits.minInterpolationOffset + << "\n"; + std::cout << "\t\t\t" + << "minMemoryMapAlignment = " << properties.limits.minMemoryMapAlignment + << "\n"; + std::cout << "\t\t\t" + << 
"minStorageBufferOffsetAlignment = " + << properties.limits.minStorageBufferOffsetAlignment << "\n"; + std::cout << "\t\t\t" + << "minTexelBufferOffsetAlignment = " + << properties.limits.minTexelBufferOffsetAlignment << "\n"; + std::cout << "\t\t\t" + << "minTexelGatherOffset = " << properties.limits.minTexelGatherOffset + << "\n"; + std::cout << "\t\t\t" + << "minTexelOffset = " << properties.limits.minTexelOffset << "\n"; + std::cout << "\t\t\t" + << "minUniformBufferOffsetAlignment = " + << properties.limits.minUniformBufferOffsetAlignment << "\n"; + std::cout << "\t\t\t" + << "mipmapPrecisionBits = " << properties.limits.mipmapPrecisionBits + << "\n"; + std::cout << "\t\t\t" + << "nonCoherentAtomSize = " << properties.limits.nonCoherentAtomSize + << "\n"; + std::cout << "\t\t\t" + << "optimalBufferCopyOffsetAlignment = " + << properties.limits.optimalBufferCopyOffsetAlignment << "\n"; + std::cout << "\t\t\t" + << "optimalBufferCopyRowPitchAlignment = " + << properties.limits.optimalBufferCopyRowPitchAlignment << "\n"; + std::cout << "\t\t\t" + << "pointSizeGranularity = " << properties.limits.pointSizeGranularity + << "\n"; + std::cout << "\t\t\t" + << "pointSizeRange = " + << "[" << properties.limits.pointSizeRange[0] << ", " << properties.limits.pointSizeRange[1] << "]" + << "\n"; + std::cout << "\t\t\t" + << "sampledImageColorSampleCounts = " + << vk::to_string( properties.limits.sampledImageColorSampleCounts ) << "\n"; + std::cout << "\t\t\t" + << "sampledImageDepthSampleCounts = " + << vk::to_string( properties.limits.sampledImageDepthSampleCounts ) << "\n"; + std::cout << "\t\t\t" + << "sampledImageIntegerSampleCounts = " + << vk::to_string( properties.limits.sampledImageIntegerSampleCounts ) << "\n"; + std::cout << "\t\t\t" + << "sampledImageStencilSampleCounts = " + << vk::to_string( properties.limits.sampledImageStencilSampleCounts ) << "\n"; + std::cout << "\t\t\t" + << "sparseAddressSpaceSize = " << properties.limits.sparseAddressSpaceSize + << "\n"; + std::cout << "\t\t\t" + << "standardSampleLocations = " + << static_cast( properties.limits.standardSampleLocations ) << "\n"; + std::cout << "\t\t\t" + << "storageImageSampleCounts = " + << vk::to_string( properties.limits.storageImageSampleCounts ) << "\n"; + std::cout << "\t\t\t" + << "strictLines = " + << static_cast( properties.limits.strictLines ) << "\n"; + std::cout << "\t\t\t" + << "subPixelInterpolationOffsetBits = " + << properties.limits.subPixelInterpolationOffsetBits << "\n"; + std::cout << "\t\t\t" + << "subPixelPrecisionBits = " << properties.limits.subPixelPrecisionBits + << "\n"; + std::cout << "\t\t\t" + << "subTexelPrecisionBits = " << properties.limits.subTexelPrecisionBits + << "\n"; + std::cout << "\t\t\t" + << "timestampComputeAndGraphics = " + << static_cast( properties.limits.timestampComputeAndGraphics ) << "\n"; + std::cout << "\t\t\t" + << "timestampPeriod = " << properties.limits.timestampPeriod << "\n"; + std::cout << "\t\t\t" + << "viewportBoundsRange = " + << "[" << properties.limits.viewportBoundsRange[0] << ", " << properties.limits.viewportBoundsRange[1] + << "]" + << "\n"; + std::cout << "\t\t\t" + << "viewportSubPixelBits = " << properties.limits.viewportSubPixelBits + << "\n"; + std::cout << "\t\t" + << "sparseProperties:\n"; + std::cout << "\t\t\t" + << "residencyAlignedMipSize = " + << static_cast( properties.sparseProperties.residencyAlignedMipSize ) << "\n"; + std::cout << "\t\t\t" + << "residencyNonResidentStrict = " + << static_cast( 
properties.sparseProperties.residencyNonResidentStrict ) << "\n";
+ std::cout << "\t\t\t"
+ << "residencyStandard2DBlockShape = "
+ << static_cast<bool>( properties.sparseProperties.residencyStandard2DBlockShape ) << "\n";
+ std::cout << "\t\t\t"
+ << "residencyStandard2DMultisampleBlockShape = "
+ << static_cast<bool>( properties.sparseProperties.residencyStandard2DMultisampleBlockShape ) << "\n";
+ std::cout << "\t\t\t"
+ << "residencyStandard3DBlockShape = "
+ << static_cast<bool>( properties.sparseProperties.residencyStandard3DBlockShape ) << "\n";
 std::cout << "\n";
- if (vk::su::contains(extensionProperties, "VK_EXT_blend_operation_advanced"))
+ if ( vk::su::contains( extensionProperties, "VK_EXT_blend_operation_advanced" ) )
 {
- vk::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& blendOperationAdvancedProperties = properties2.get<vk::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>();
- std::cout << "\t" << "BlendOperationAdvancedProperties:\n";
- std::cout << "\t\t" << "advancedBlendAllOperations = " << static_cast<bool>(blendOperationAdvancedProperties.advancedBlendAllOperations) << "\n";
- std::cout << "\t\t" << "advancedBlendCorrelatedOverlap = " << static_cast<bool>(blendOperationAdvancedProperties.advancedBlendCorrelatedOverlap) << "\n";
- std::cout << "\t\t" << "advancedBlendIndependentBlend = " << static_cast<bool>(blendOperationAdvancedProperties.advancedBlendIndependentBlend) << "\n";
- std::cout << "\t\t" << "advancedBlendMaxColorAttachments = " << blendOperationAdvancedProperties.advancedBlendMaxColorAttachments << "\n";
- std::cout << "\t\t" << "advancedBlendNonPremultipliedDstColor = " << static_cast<bool>(blendOperationAdvancedProperties.advancedBlendNonPremultipliedDstColor) << "\n";
- std::cout << "\t\t" << "advancedBlendNonPremultipliedSrcColor = " << static_cast<bool>(blendOperationAdvancedProperties.advancedBlendNonPremultipliedSrcColor) << "\n";
+ vk::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & blendOperationAdvancedProperties =
+ properties2.get<vk::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>();
+ std::cout << "\t"
+ << "BlendOperationAdvancedProperties:\n";
+ std::cout << "\t\t"
+ << "advancedBlendAllOperations = "
+ << static_cast<bool>( blendOperationAdvancedProperties.advancedBlendAllOperations ) << "\n";
+ std::cout << "\t\t"
+ << "advancedBlendCorrelatedOverlap = "
+ << static_cast<bool>( blendOperationAdvancedProperties.advancedBlendCorrelatedOverlap ) << "\n";
+ std::cout << "\t\t"
+ << "advancedBlendIndependentBlend = "
+ << static_cast<bool>( blendOperationAdvancedProperties.advancedBlendIndependentBlend ) << "\n";
+ std::cout << "\t\t"
+ << "advancedBlendMaxColorAttachments = "
+ << blendOperationAdvancedProperties.advancedBlendMaxColorAttachments << "\n";
+ std::cout << "\t\t"
+ << "advancedBlendNonPremultipliedDstColor = "
+ << static_cast<bool>( blendOperationAdvancedProperties.advancedBlendNonPremultipliedDstColor )
+ << "\n";
+ std::cout << "\t\t"
+ << "advancedBlendNonPremultipliedSrcColor = "
+ << static_cast<bool>( blendOperationAdvancedProperties.advancedBlendNonPremultipliedSrcColor )
+ << "\n";
 std::cout << "\n";
 }
- if (vk::su::contains(extensionProperties, "VK_EXT_conservative_rasterization"))
+ if ( vk::su::contains( extensionProperties, "VK_EXT_conservative_rasterization" ) )
 {
- vk::PhysicalDeviceConservativeRasterizationPropertiesEXT const& conservativeRasterizationProperties = properties2.get<vk::PhysicalDeviceConservativeRasterizationPropertiesEXT>();
- std::cout << "\t" << "ConservativeRasterizationProperties:\n";
- std::cout << "\t\t" << "conservativePointAndLineRasterization = " << static_cast<bool>(conservativeRasterizationProperties.conservativePointAndLineRasterization) << "\n";
- std::cout << "\t\t" <<
"conservativeRasterizationPostDepthCoverage = " << static_cast(conservativeRasterizationProperties.conservativeRasterizationPostDepthCoverage) << "\n"; - std::cout << "\t\t" << "degenerateLinesRasterized = " << static_cast(conservativeRasterizationProperties.degenerateLinesRasterized) << "\n"; - std::cout << "\t\t" << "degenerateTrianglesRasterized = " << static_cast(conservativeRasterizationProperties.degenerateTrianglesRasterized) << "\n"; - std::cout << "\t\t" << "extraPrimitiveOverestimationSizeGranularity = " << conservativeRasterizationProperties.extraPrimitiveOverestimationSizeGranularity << "\n"; - std::cout << "\t\t" << "fullyCoveredFragmentShaderInputVariable = " << static_cast(conservativeRasterizationProperties.fullyCoveredFragmentShaderInputVariable) << "\n"; - std::cout << "\t\t" << "maxExtraPrimitiveOverestimationSize = " << conservativeRasterizationProperties.maxExtraPrimitiveOverestimationSize << "\n"; - std::cout << "\t\t" << "primitiveOverestimationSize = " << conservativeRasterizationProperties.primitiveOverestimationSize << "\n"; - std::cout << "\t\t" << "primitiveUnderestimation = " << static_cast(conservativeRasterizationProperties.primitiveUnderestimation) << "\n"; + vk::PhysicalDeviceConservativeRasterizationPropertiesEXT const & conservativeRasterizationProperties = + properties2.get(); + std::cout << "\t" + << "ConservativeRasterizationProperties:\n"; + std::cout << "\t\t" + << "conservativePointAndLineRasterization = " + << static_cast( conservativeRasterizationProperties.conservativePointAndLineRasterization ) + << "\n"; + std::cout << "\t\t" + << "conservativeRasterizationPostDepthCoverage = " + << static_cast( conservativeRasterizationProperties.conservativeRasterizationPostDepthCoverage ) + << "\n"; + std::cout << "\t\t" + << "degenerateLinesRasterized = " + << static_cast( conservativeRasterizationProperties.degenerateLinesRasterized ) << "\n"; + std::cout << "\t\t" + << "degenerateTrianglesRasterized = " + << static_cast( conservativeRasterizationProperties.degenerateTrianglesRasterized ) << "\n"; + std::cout << "\t\t" + << "extraPrimitiveOverestimationSizeGranularity = " + << conservativeRasterizationProperties.extraPrimitiveOverestimationSizeGranularity << "\n"; + std::cout << "\t\t" + << "fullyCoveredFragmentShaderInputVariable = " + << static_cast( conservativeRasterizationProperties.fullyCoveredFragmentShaderInputVariable ) + << "\n"; + std::cout << "\t\t" + << "maxExtraPrimitiveOverestimationSize = " + << conservativeRasterizationProperties.maxExtraPrimitiveOverestimationSize << "\n"; + std::cout << "\t\t" + << "primitiveOverestimationSize = " + << conservativeRasterizationProperties.primitiveOverestimationSize << "\n"; + std::cout << "\t\t" + << "primitiveUnderestimation = " + << static_cast( conservativeRasterizationProperties.primitiveUnderestimation ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NV_cooperative_matrix")) + if ( vk::su::contains( extensionProperties, "VK_NV_cooperative_matrix" ) ) { - vk::PhysicalDeviceCooperativeMatrixPropertiesNV const& cooperativeMatrixProperties = properties2.get(); - std::cout << "\t" << "CooperativeMatrixProperties:\n"; - std::cout << "\t\t" << "cooperativeMatrixSupportedStages = " << vk::to_string(cooperativeMatrixProperties.cooperativeMatrixSupportedStages) << "\n"; + vk::PhysicalDeviceCooperativeMatrixPropertiesNV const & cooperativeMatrixProperties = + properties2.get(); + std::cout << "\t" + << "CooperativeMatrixProperties:\n"; + std::cout << "\t\t" + << 
"cooperativeMatrixSupportedStages = " + << vk::to_string( cooperativeMatrixProperties.cooperativeMatrixSupportedStages ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_depth_stencil_resolve")) + if ( vk::su::contains( extensionProperties, "VK_KHR_depth_stencil_resolve" ) ) { - vk::PhysicalDeviceDepthStencilResolvePropertiesKHR const& depthStencilResolveProperties = properties2.get(); - std::cout << "\t" << "DepthStencilResolveProperties:\n"; - std::cout << "\t\t" << "independentResolve = " << static_cast(depthStencilResolveProperties.independentResolve) << "\n"; - std::cout << "\t\t" << "independentResolveNone = " << static_cast(depthStencilResolveProperties.independentResolveNone) << "\n"; - std::cout << "\t\t" << "supportedDepthResolveModes = " << vk::to_string(depthStencilResolveProperties.supportedDepthResolveModes) << "\n"; - std::cout << "\t\t" << "supportedStencilResolveModes = " << vk::to_string(depthStencilResolveProperties.supportedStencilResolveModes) << "\n"; + vk::PhysicalDeviceDepthStencilResolvePropertiesKHR const & depthStencilResolveProperties = + properties2.get(); + std::cout << "\t" + << "DepthStencilResolveProperties:\n"; + std::cout << "\t\t" + << "independentResolve = " + << static_cast( depthStencilResolveProperties.independentResolve ) << "\n"; + std::cout << "\t\t" + << "independentResolveNone = " + << static_cast( depthStencilResolveProperties.independentResolveNone ) << "\n"; + std::cout << "\t\t" + << "supportedDepthResolveModes = " + << vk::to_string( depthStencilResolveProperties.supportedDepthResolveModes ) << "\n"; + std::cout << "\t\t" + << "supportedStencilResolveModes = " + << vk::to_string( depthStencilResolveProperties.supportedStencilResolveModes ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_descriptor_indexing")) + if ( vk::su::contains( extensionProperties, "VK_EXT_descriptor_indexing" ) ) { - vk::PhysicalDeviceDescriptorIndexingPropertiesEXT const& descriptorIndexingProperties = properties2.get(); - std::cout << "\t" << "DescriptorIndexingProperties:\n"; - std::cout << "\t\t" << "maxDescriptorSetUpdateAfterBindInputAttachments = " << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindInputAttachments << "\n"; - std::cout << "\t\t" << "maxDescriptorSetUpdateAfterBindSampledImages = " << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSampledImages << "\n"; - std::cout << "\t\t" << "maxDescriptorSetUpdateAfterBindSamplers = " << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSamplers << "\n"; - std::cout << "\t\t" << "maxDescriptorSetUpdateAfterBindStorageBuffers = " << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffers << "\n"; - std::cout << "\t\t" << "maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = " << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic << "\n"; - std::cout << "\t\t" << "maxDescriptorSetUpdateAfterBindStorageImages = " << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageImages << "\n"; - std::cout << "\t\t" << "maxDescriptorSetUpdateAfterBindUniformBuffers = " << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffers << "\n"; - std::cout << "\t\t" << "maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = " << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic << "\n"; - std::cout << "\t\t" << "maxPerStageDescriptorUpdateAfterBindInputAttachments = " << 
descriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindInputAttachments << "\n"; - std::cout << "\t\t" << "maxPerStageDescriptorUpdateAfterBindSampledImages = " << descriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSampledImages << "\n"; - std::cout << "\t\t" << "maxPerStageDescriptorUpdateAfterBindSamplers = " << descriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSamplers << "\n"; - std::cout << "\t\t" << "maxPerStageDescriptorUpdateAfterBindStorageBuffers = " << descriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageBuffers << "\n"; - std::cout << "\t\t" << "maxPerStageDescriptorUpdateAfterBindStorageImages = " << descriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageImages << "\n"; - std::cout << "\t\t" << "maxPerStageDescriptorUpdateAfterBindUniformBuffers = " << descriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindUniformBuffers << "\n"; - std::cout << "\t\t" << "maxPerStageUpdateAfterBindResources = " << descriptorIndexingProperties.maxPerStageUpdateAfterBindResources << "\n"; - std::cout << "\t\t" << "maxUpdateAfterBindDescriptorsInAllPools = " << descriptorIndexingProperties.maxUpdateAfterBindDescriptorsInAllPools << "\n"; - std::cout << "\t\t" << "quadDivergentImplicitLod = " << static_cast(descriptorIndexingProperties.quadDivergentImplicitLod) << "\n"; - std::cout << "\t\t" << "robustBufferAccessUpdateAfterBind = " << static_cast(descriptorIndexingProperties.robustBufferAccessUpdateAfterBind) << "\n"; - std::cout << "\t\t" << "shaderInputAttachmentArrayNonUniformIndexingNative = " << static_cast(descriptorIndexingProperties.shaderInputAttachmentArrayNonUniformIndexingNative) << "\n"; - std::cout << "\t\t" << "shaderSampledImageArrayNonUniformIndexingNative = " << static_cast(descriptorIndexingProperties.shaderSampledImageArrayNonUniformIndexingNative) << "\n"; - std::cout << "\t\t" << "shaderStorageBufferArrayNonUniformIndexingNative = " << static_cast(descriptorIndexingProperties.shaderStorageBufferArrayNonUniformIndexingNative) << "\n"; - std::cout << "\t\t" << "shaderStorageImageArrayNonUniformIndexingNative = " << static_cast(descriptorIndexingProperties.shaderStorageImageArrayNonUniformIndexingNative) << "\n"; - std::cout << "\t\t" << "shaderUniformBufferArrayNonUniformIndexingNative = " << static_cast(descriptorIndexingProperties.shaderUniformBufferArrayNonUniformIndexingNative) << "\n"; + vk::PhysicalDeviceDescriptorIndexingPropertiesEXT const & descriptorIndexingProperties = + properties2.get(); + std::cout << "\t" + << "DescriptorIndexingProperties:\n"; + std::cout << "\t\t" + << "maxDescriptorSetUpdateAfterBindInputAttachments = " + << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindInputAttachments << "\n"; + std::cout << "\t\t" + << "maxDescriptorSetUpdateAfterBindSampledImages = " + << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSampledImages << "\n"; + std::cout << "\t\t" + << "maxDescriptorSetUpdateAfterBindSamplers = " + << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSamplers << "\n"; + std::cout << "\t\t" + << "maxDescriptorSetUpdateAfterBindStorageBuffers = " + << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffers << "\n"; + std::cout << "\t\t" + << "maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = " + << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic << "\n"; + std::cout << "\t\t" + << "maxDescriptorSetUpdateAfterBindStorageImages = " + << 
descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageImages << "\n"; + std::cout << "\t\t" + << "maxDescriptorSetUpdateAfterBindUniformBuffers = " + << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffers << "\n"; + std::cout << "\t\t" + << "maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = " + << descriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic << "\n"; + std::cout << "\t\t" + << "maxPerStageDescriptorUpdateAfterBindInputAttachments = " + << descriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindInputAttachments << "\n"; + std::cout << "\t\t" + << "maxPerStageDescriptorUpdateAfterBindSampledImages = " + << descriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSampledImages << "\n"; + std::cout << "\t\t" + << "maxPerStageDescriptorUpdateAfterBindSamplers = " + << descriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSamplers << "\n"; + std::cout << "\t\t" + << "maxPerStageDescriptorUpdateAfterBindStorageBuffers = " + << descriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageBuffers << "\n"; + std::cout << "\t\t" + << "maxPerStageDescriptorUpdateAfterBindStorageImages = " + << descriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageImages << "\n"; + std::cout << "\t\t" + << "maxPerStageDescriptorUpdateAfterBindUniformBuffers = " + << descriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindUniformBuffers << "\n"; + std::cout << "\t\t" + << "maxPerStageUpdateAfterBindResources = " + << descriptorIndexingProperties.maxPerStageUpdateAfterBindResources << "\n"; + std::cout << "\t\t" + << "maxUpdateAfterBindDescriptorsInAllPools = " + << descriptorIndexingProperties.maxUpdateAfterBindDescriptorsInAllPools << "\n"; + std::cout << "\t\t" + << "quadDivergentImplicitLod = " + << static_cast( descriptorIndexingProperties.quadDivergentImplicitLod ) << "\n"; + std::cout << "\t\t" + << "robustBufferAccessUpdateAfterBind = " + << static_cast( descriptorIndexingProperties.robustBufferAccessUpdateAfterBind ) << "\n"; + std::cout << "\t\t" + << "shaderInputAttachmentArrayNonUniformIndexingNative = " + << static_cast( + descriptorIndexingProperties.shaderInputAttachmentArrayNonUniformIndexingNative ) + << "\n"; + std::cout << "\t\t" + << "shaderSampledImageArrayNonUniformIndexingNative = " + << static_cast( descriptorIndexingProperties.shaderSampledImageArrayNonUniformIndexingNative ) + << "\n"; + std::cout << "\t\t" + << "shaderStorageBufferArrayNonUniformIndexingNative = " + << static_cast( descriptorIndexingProperties.shaderStorageBufferArrayNonUniformIndexingNative ) + << "\n"; + std::cout << "\t\t" + << "shaderStorageImageArrayNonUniformIndexingNative = " + << static_cast( descriptorIndexingProperties.shaderStorageImageArrayNonUniformIndexingNative ) + << "\n"; + std::cout << "\t\t" + << "shaderUniformBufferArrayNonUniformIndexingNative = " + << static_cast( descriptorIndexingProperties.shaderUniformBufferArrayNonUniformIndexingNative ) + << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_discard_rectangles")) + if ( vk::su::contains( extensionProperties, "VK_EXT_discard_rectangles" ) ) { - vk::PhysicalDeviceDiscardRectanglePropertiesEXT const& discardRectangleProperties = properties2.get(); - std::cout << "\t" << "DiscardRectangleProperties:\n"; - std::cout << "\t\t" << "maxDiscardRectangles = " << discardRectangleProperties.maxDiscardRectangles << "\n"; + vk::PhysicalDeviceDiscardRectanglePropertiesEXT const & 
discardRectangleProperties = + properties2.get(); + std::cout << "\t" + << "DiscardRectangleProperties:\n"; + std::cout << "\t\t" + << "maxDiscardRectangles = " << discardRectangleProperties.maxDiscardRectangles << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_driver_properties")) + if ( vk::su::contains( extensionProperties, "VK_KHR_driver_properties" ) ) { - vk::PhysicalDeviceDriverPropertiesKHR const& driverProperties = properties2.get(); - std::cout << "\t" << "DriverProperties:\n"; - std::cout << "\t\t" << "driverID = " << vk::to_string(driverProperties.driverID) << "\n"; - std::cout << "\t\t" << "driverName = " << driverProperties.driverName << "\n"; - std::cout << "\t\t" << "driverInfo = " << driverProperties.driverInfo << "\n"; - std::cout << "\t\t" << "conformanceVersion = " << static_cast(driverProperties.conformanceVersion.major) << "." << static_cast(driverProperties.conformanceVersion.minor) << "." << static_cast(driverProperties.conformanceVersion.subminor) << "." << static_cast(driverProperties.conformanceVersion.patch) << std::dec << "\n"; + vk::PhysicalDeviceDriverPropertiesKHR const & driverProperties = + properties2.get(); + std::cout << "\t" + << "DriverProperties:\n"; + std::cout << "\t\t" + << "driverID = " << vk::to_string( driverProperties.driverID ) << "\n"; + std::cout << "\t\t" + << "driverName = " << driverProperties.driverName << "\n"; + std::cout << "\t\t" + << "driverInfo = " << driverProperties.driverInfo << "\n"; + std::cout << "\t\t" + << "conformanceVersion = " << static_cast( driverProperties.conformanceVersion.major ) + << "." << static_cast( driverProperties.conformanceVersion.minor ) << "." + << static_cast( driverProperties.conformanceVersion.subminor ) << "." + << static_cast( driverProperties.conformanceVersion.patch ) << std::dec << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_external_memory_host")) + if ( vk::su::contains( extensionProperties, "VK_EXT_external_memory_host" ) ) { - vk::PhysicalDeviceExternalMemoryHostPropertiesEXT const& externalMemoryHostProperties = properties2.get(); - std::cout << "\t" << "ExternalMemoryHostProperties:\n"; - std::cout << "\t\t" << "minImportedHostPointerAlignment = " << externalMemoryHostProperties.minImportedHostPointerAlignment << "\n"; + vk::PhysicalDeviceExternalMemoryHostPropertiesEXT const & externalMemoryHostProperties = + properties2.get(); + std::cout << "\t" + << "ExternalMemoryHostProperties:\n"; + std::cout << "\t\t" + << "minImportedHostPointerAlignment = " + << externalMemoryHostProperties.minImportedHostPointerAlignment << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_shader_float_controls")) + if ( vk::su::contains( extensionProperties, "VK_KHR_shader_float_controls" ) ) { - vk::PhysicalDeviceFloatControlsPropertiesKHR const& floatControlsProperties = properties2.get(); - std::cout << "\t" << "FloatControlsProperties:\n"; - std::cout << "\t\t" << "denormBehaviorIndependence = " << vk::to_string(floatControlsProperties.denormBehaviorIndependence) << "\n"; - std::cout << "\t\t" << "roundingModeIndependence = " << vk::to_string(floatControlsProperties.roundingModeIndependence) << "\n"; - std::cout << "\t\t" << "shaderDenormFlushToZeroFloat16 = " << static_cast(floatControlsProperties.shaderDenormFlushToZeroFloat16) << "\n"; - std::cout << "\t\t" << "shaderDenormFlushToZeroFloat32 = " << static_cast(floatControlsProperties.shaderDenormFlushToZeroFloat32) << "\n"; - std::cout << "\t\t" << 
"shaderDenormFlushToZeroFloat64 = " << static_cast(floatControlsProperties.shaderDenormFlushToZeroFloat64) << "\n"; - std::cout << "\t\t" << "shaderDenormPreserveFloat16 = " << static_cast(floatControlsProperties.shaderDenormPreserveFloat16) << "\n"; - std::cout << "\t\t" << "shaderDenormPreserveFloat32 = " << static_cast(floatControlsProperties.shaderDenormPreserveFloat32) << "\n"; - std::cout << "\t\t" << "shaderDenormPreserveFloat64 = " << static_cast(floatControlsProperties.shaderDenormPreserveFloat64) << "\n"; - std::cout << "\t\t" << "shaderRoundingModeRTEFloat16 = " << static_cast(floatControlsProperties.shaderRoundingModeRTEFloat16) << "\n"; - std::cout << "\t\t" << "shaderRoundingModeRTEFloat32 = " << static_cast(floatControlsProperties.shaderRoundingModeRTEFloat32) << "\n"; - std::cout << "\t\t" << "shaderRoundingModeRTEFloat64 = " << static_cast(floatControlsProperties.shaderRoundingModeRTEFloat64) << "\n"; - std::cout << "\t\t" << "shaderRoundingModeRTZFloat16 = " << static_cast(floatControlsProperties.shaderRoundingModeRTZFloat16) << "\n"; - std::cout << "\t\t" << "shaderRoundingModeRTZFloat32 = " << static_cast(floatControlsProperties.shaderRoundingModeRTZFloat32) << "\n"; - std::cout << "\t\t" << "shaderRoundingModeRTZFloat64 = " << static_cast(floatControlsProperties.shaderRoundingModeRTZFloat64) << "\n"; - std::cout << "\t\t" << "shaderSignedZeroInfNanPreserveFloat16 = " << static_cast(floatControlsProperties.shaderSignedZeroInfNanPreserveFloat16) << "\n"; - std::cout << "\t\t" << "shaderSignedZeroInfNanPreserveFloat32 = " << static_cast(floatControlsProperties.shaderSignedZeroInfNanPreserveFloat32) << "\n"; - std::cout << "\t\t" << "shaderSignedZeroInfNanPreserveFloat64 = " << static_cast(floatControlsProperties.shaderSignedZeroInfNanPreserveFloat64) << "\n"; + vk::PhysicalDeviceFloatControlsPropertiesKHR const & floatControlsProperties = + properties2.get(); + std::cout << "\t" + << "FloatControlsProperties:\n"; + std::cout << "\t\t" + << "denormBehaviorIndependence = " + << vk::to_string( floatControlsProperties.denormBehaviorIndependence ) << "\n"; + std::cout << "\t\t" + << "roundingModeIndependence = " + << vk::to_string( floatControlsProperties.roundingModeIndependence ) << "\n"; + std::cout << "\t\t" + << "shaderDenormFlushToZeroFloat16 = " + << static_cast( floatControlsProperties.shaderDenormFlushToZeroFloat16 ) << "\n"; + std::cout << "\t\t" + << "shaderDenormFlushToZeroFloat32 = " + << static_cast( floatControlsProperties.shaderDenormFlushToZeroFloat32 ) << "\n"; + std::cout << "\t\t" + << "shaderDenormFlushToZeroFloat64 = " + << static_cast( floatControlsProperties.shaderDenormFlushToZeroFloat64 ) << "\n"; + std::cout << "\t\t" + << "shaderDenormPreserveFloat16 = " + << static_cast( floatControlsProperties.shaderDenormPreserveFloat16 ) << "\n"; + std::cout << "\t\t" + << "shaderDenormPreserveFloat32 = " + << static_cast( floatControlsProperties.shaderDenormPreserveFloat32 ) << "\n"; + std::cout << "\t\t" + << "shaderDenormPreserveFloat64 = " + << static_cast( floatControlsProperties.shaderDenormPreserveFloat64 ) << "\n"; + std::cout << "\t\t" + << "shaderRoundingModeRTEFloat16 = " + << static_cast( floatControlsProperties.shaderRoundingModeRTEFloat16 ) << "\n"; + std::cout << "\t\t" + << "shaderRoundingModeRTEFloat32 = " + << static_cast( floatControlsProperties.shaderRoundingModeRTEFloat32 ) << "\n"; + std::cout << "\t\t" + << "shaderRoundingModeRTEFloat64 = " + << static_cast( floatControlsProperties.shaderRoundingModeRTEFloat64 ) << "\n"; + std::cout << 
"\t\t" + << "shaderRoundingModeRTZFloat16 = " + << static_cast( floatControlsProperties.shaderRoundingModeRTZFloat16 ) << "\n"; + std::cout << "\t\t" + << "shaderRoundingModeRTZFloat32 = " + << static_cast( floatControlsProperties.shaderRoundingModeRTZFloat32 ) << "\n"; + std::cout << "\t\t" + << "shaderRoundingModeRTZFloat64 = " + << static_cast( floatControlsProperties.shaderRoundingModeRTZFloat64 ) << "\n"; + std::cout << "\t\t" + << "shaderSignedZeroInfNanPreserveFloat16 = " + << static_cast( floatControlsProperties.shaderSignedZeroInfNanPreserveFloat16 ) << "\n"; + std::cout << "\t\t" + << "shaderSignedZeroInfNanPreserveFloat32 = " + << static_cast( floatControlsProperties.shaderSignedZeroInfNanPreserveFloat32 ) << "\n"; + std::cout << "\t\t" + << "shaderSignedZeroInfNanPreserveFloat64 = " + << static_cast( floatControlsProperties.shaderSignedZeroInfNanPreserveFloat64 ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_fragment_density_map")) + if ( vk::su::contains( extensionProperties, "VK_EXT_fragment_density_map" ) ) { - vk::PhysicalDeviceFragmentDensityMapPropertiesEXT const& fragmentDensityMapProperties = properties2.get(); - std::cout << "\t" << "FragmentDensityProperties:\n"; - std::cout << "\t\t" << "fragmentDensityInvocations = " << static_cast(fragmentDensityMapProperties.fragmentDensityInvocations) << "\n"; - std::cout << "\t\t" << "maxFragmentDensityTexelSize = " << fragmentDensityMapProperties.maxFragmentDensityTexelSize.width << " x " << fragmentDensityMapProperties.maxFragmentDensityTexelSize.height << "\n"; - std::cout << "\t\t" << "minFragmentDensityTexelSize = " << fragmentDensityMapProperties.minFragmentDensityTexelSize.width << " x " << fragmentDensityMapProperties.minFragmentDensityTexelSize.height << "\n"; + vk::PhysicalDeviceFragmentDensityMapPropertiesEXT const & fragmentDensityMapProperties = + properties2.get(); + std::cout << "\t" + << "FragmentDensityProperties:\n"; + std::cout << "\t\t" + << "fragmentDensityInvocations = " + << static_cast( fragmentDensityMapProperties.fragmentDensityInvocations ) << "\n"; + std::cout << "\t\t" + << "maxFragmentDensityTexelSize = " << fragmentDensityMapProperties.maxFragmentDensityTexelSize.width + << " x " << fragmentDensityMapProperties.maxFragmentDensityTexelSize.height << "\n"; + std::cout << "\t\t" + << "minFragmentDensityTexelSize = " << fragmentDensityMapProperties.minFragmentDensityTexelSize.width + << " x " << fragmentDensityMapProperties.minFragmentDensityTexelSize.height << "\n"; std::cout << "\n"; } - vk::PhysicalDeviceIDProperties const& idProperties = properties2.get(); - std::cout << "\t" << "IDProperties:\n"; - std::cout << "\t\t" << "deviceUUID = " << vk::su::UUID(idProperties.deviceUUID) << "\n"; - std::cout << "\t\t" << "driverUUID = " << vk::su::UUID(idProperties.driverUUID) << "\n"; - std::cout << "\t\t" << "deviceLUID = " << vk::su::LUID(idProperties.deviceLUID) << "\n"; - std::cout << "\t\t" << "deviceNodeMask = " << std::hex << idProperties.deviceNodeMask << std::dec << "\n"; - std::cout << "\t\t" << "deviceLUIDValid = " << static_cast(idProperties.deviceLUIDValid) << "\n"; + vk::PhysicalDeviceIDProperties const & idProperties = properties2.get(); + std::cout << "\t" + << "IDProperties:\n"; + std::cout << "\t\t" + << "deviceUUID = " << vk::su::UUID( idProperties.deviceUUID ) << "\n"; + std::cout << "\t\t" + << "driverUUID = " << vk::su::UUID( idProperties.driverUUID ) << "\n"; + std::cout << "\t\t" + << "deviceLUID = " << vk::su::LUID( idProperties.deviceLUID ) << 
"\n"; + std::cout << "\t\t" + << "deviceNodeMask = " << std::hex << idProperties.deviceNodeMask << std::dec << "\n"; + std::cout << "\t\t" + << "deviceLUIDValid = " << static_cast( idProperties.deviceLUIDValid ) << "\n"; std::cout << "\n"; - if (vk::su::contains(extensionProperties, "VK_EXT_inline_uniform_block")) + if ( vk::su::contains( extensionProperties, "VK_EXT_inline_uniform_block" ) ) { - vk::PhysicalDeviceInlineUniformBlockPropertiesEXT const& inlineUniformBlockProperties = properties2.get(); - std::cout << "\t" << "InlineUniformBlockProperties:\n"; - std::cout << "\t\t" << "maxDescriptorSetInlineUniformBlocks = " << inlineUniformBlockProperties.maxDescriptorSetInlineUniformBlocks << "\n"; - std::cout << "\t\t" << "maxDescriptorSetUpdateAfterBindInlineUniformBlocks = " << inlineUniformBlockProperties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks << "\n"; - std::cout << "\t\t" << "maxInlineUniformBlockSize = " << inlineUniformBlockProperties.maxInlineUniformBlockSize << "\n"; - std::cout << "\t\t" << "maxPerStageDescriptorInlineUniformBlocks = " << inlineUniformBlockProperties.maxPerStageDescriptorInlineUniformBlocks << "\n"; - std::cout << "\t\t" << "maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = " << inlineUniformBlockProperties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks << "\n"; + vk::PhysicalDeviceInlineUniformBlockPropertiesEXT const & inlineUniformBlockProperties = + properties2.get(); + std::cout << "\t" + << "InlineUniformBlockProperties:\n"; + std::cout << "\t\t" + << "maxDescriptorSetInlineUniformBlocks = " + << inlineUniformBlockProperties.maxDescriptorSetInlineUniformBlocks << "\n"; + std::cout << "\t\t" + << "maxDescriptorSetUpdateAfterBindInlineUniformBlocks = " + << inlineUniformBlockProperties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks << "\n"; + std::cout << "\t\t" + << "maxInlineUniformBlockSize = " + << inlineUniformBlockProperties.maxInlineUniformBlockSize << "\n"; + std::cout << "\t\t" + << "maxPerStageDescriptorInlineUniformBlocks = " + << inlineUniformBlockProperties.maxPerStageDescriptorInlineUniformBlocks << "\n"; + std::cout << "\t\t" + << "maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = " + << inlineUniformBlockProperties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_line_rasterization")) + if ( vk::su::contains( extensionProperties, "VK_EXT_line_rasterization" ) ) { - vk::PhysicalDeviceLineRasterizationPropertiesEXT const& lineRasterizationProperties = properties2.get(); - std::cout << "\t" << "LineRasterizationProperties:\n"; - std::cout << "\t\t" << "lineSubPixelPrecisionBits = " << lineRasterizationProperties.lineSubPixelPrecisionBits << "\n"; + vk::PhysicalDeviceLineRasterizationPropertiesEXT const & lineRasterizationProperties = + properties2.get(); + std::cout << "\t" + << "LineRasterizationProperties:\n"; + std::cout << "\t\t" + << "lineSubPixelPrecisionBits = " << lineRasterizationProperties.lineSubPixelPrecisionBits << "\n"; std::cout << "\n"; } - vk::PhysicalDeviceMaintenance3Properties const& maintenance3Properties = properties2.get(); - std::cout << "\t" << "Maintenance3Properties:\n"; - std::cout << "\t\t" << "maxMemoryAllocationSize = " << maintenance3Properties.maxMemoryAllocationSize << "\n"; - std::cout << "\t\t" << "maxPerSetDescriptors = " << maintenance3Properties.maxPerSetDescriptors << "\n"; + vk::PhysicalDeviceMaintenance3Properties const & maintenance3Properties = + properties2.get(); + 
std::cout << "\t"
+ << "Maintenance3Properties:\n";
+ std::cout << "\t\t"
+ << "maxMemoryAllocationSize = " << maintenance3Properties.maxMemoryAllocationSize << "\n";
+ std::cout << "\t\t"
+ << "maxPerSetDescriptors = " << maintenance3Properties.maxPerSetDescriptors << "\n";
 std::cout << "\n";
- if (vk::su::contains(extensionProperties, "VK_NV_mesh_shader"))
+ if ( vk::su::contains( extensionProperties, "VK_NV_mesh_shader" ) )
 {
- vk::PhysicalDeviceMeshShaderPropertiesNV const& meshShaderProperties = properties2.get<vk::PhysicalDeviceMeshShaderPropertiesNV>();
- std::cout << "\t" << "MeshShaderProperties:\n";
- std::cout << "\t\t" << "maxDrawMeshTasksCount = " << meshShaderProperties.maxDrawMeshTasksCount << "\n";
- std::cout << "\t\t" << "maxMeshMultiviewViewCount = " << meshShaderProperties.maxMeshMultiviewViewCount << "\n";
- std::cout << "\t\t" << "maxMeshOutputPrimitives = " << meshShaderProperties.maxMeshOutputPrimitives << "\n";
- std::cout << "\t\t" << "maxMeshOutputVertices = " << meshShaderProperties.maxMeshOutputVertices << "\n";
- std::cout << "\t\t" << "maxMeshTotalMemorySize = " << meshShaderProperties.maxMeshTotalMemorySize << "\n";
- std::cout << "\t\t" << "maxMeshWorkGroupInvocations = " << meshShaderProperties.maxMeshWorkGroupInvocations << "\n";
- std::cout << "\t\t" << "maxMeshWorkGroupSize = " << "[" << meshShaderProperties.maxMeshWorkGroupSize[0] << ", " << meshShaderProperties.maxMeshWorkGroupSize[1] << ", " << meshShaderProperties.maxMeshWorkGroupSize[2] << "]" << "\n";
- std::cout << "\t\t" << "maxTaskOutputCount = " << meshShaderProperties.maxTaskOutputCount << "\n";
- std::cout << "\t\t" << "maxTaskTotalMemorySize = " << meshShaderProperties.maxTaskTotalMemorySize << "\n";
- std::cout << "\t\t" << "maxTaskWorkGroupInvocations = " << meshShaderProperties.maxTaskWorkGroupInvocations << "\n";
- std::cout << "\t\t" << "maxTaskWorkGroupSize = " << "[" << meshShaderProperties.maxTaskWorkGroupSize[0] << ", " << meshShaderProperties.maxTaskWorkGroupSize[1] << ", " << meshShaderProperties.maxTaskWorkGroupSize[2] << "]" << "\n";
- std::cout << "\t\t" << "meshOutputPerPrimitiveGranularity = " << meshShaderProperties.meshOutputPerPrimitiveGranularity << "\n";
- std::cout << "\t\t" << "meshOutputPerVertexGranularity = " << meshShaderProperties.meshOutputPerVertexGranularity << "\n";
+ vk::PhysicalDeviceMeshShaderPropertiesNV const & meshShaderProperties =
+ properties2.get<vk::PhysicalDeviceMeshShaderPropertiesNV>();
+ std::cout << "\t"
+ << "MeshShaderProperties:\n";
+ std::cout << "\t\t"
+ << "maxDrawMeshTasksCount = " << meshShaderProperties.maxDrawMeshTasksCount << "\n";
+ std::cout << "\t\t"
+ << "maxMeshMultiviewViewCount = " << meshShaderProperties.maxMeshMultiviewViewCount << "\n";
+ std::cout << "\t\t"
+ << "maxMeshOutputPrimitives = " << meshShaderProperties.maxMeshOutputPrimitives << "\n";
+ std::cout << "\t\t"
+ << "maxMeshOutputVertices = " << meshShaderProperties.maxMeshOutputVertices << "\n";
+ std::cout << "\t\t"
+ << "maxMeshTotalMemorySize = " << meshShaderProperties.maxMeshTotalMemorySize << "\n";
+ std::cout << "\t\t"
+ << "maxMeshWorkGroupInvocations = " << meshShaderProperties.maxMeshWorkGroupInvocations << "\n";
+ std::cout << "\t\t"
+ << "maxMeshWorkGroupSize = "
+ << "[" << meshShaderProperties.maxMeshWorkGroupSize[0] << ", "
+ << meshShaderProperties.maxMeshWorkGroupSize[1] << ", "
+ << meshShaderProperties.maxMeshWorkGroupSize[2] << "]"
+ << "\n";
+ std::cout << "\t\t"
+ << "maxTaskOutputCount = " << meshShaderProperties.maxTaskOutputCount << "\n";
+ std::cout << "\t\t"
+ << "maxTaskTotalMemorySize = " <<
meshShaderProperties.maxTaskTotalMemorySize << "\n"; + std::cout << "\t\t" + << "maxTaskWorkGroupInvocations = " << meshShaderProperties.maxTaskWorkGroupInvocations << "\n"; + std::cout << "\t\t" + << "maxTaskWorkGroupSize = " + << "[" << meshShaderProperties.maxTaskWorkGroupSize[0] << ", " + << meshShaderProperties.maxTaskWorkGroupSize[1] << ", " + << meshShaderProperties.maxTaskWorkGroupSize[2] << "]" + << "\n"; + std::cout << "\t\t" + << "meshOutputPerPrimitiveGranularity = " << meshShaderProperties.meshOutputPerPrimitiveGranularity + << "\n"; + std::cout << "\t\t" + << "meshOutputPerVertexGranularity = " << meshShaderProperties.meshOutputPerVertexGranularity + << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NVX_multiview_per_view_attributes")) + if ( vk::su::contains( extensionProperties, "VK_NVX_multiview_per_view_attributes" ) ) { - vk::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& multiviewPerViewAttributesProperties = properties2.get(); - std::cout << "\t" << "MultiviewPerViewAttributesProperties:\n"; - std::cout << "\t\t" << "perViewPositionAllComponents = " << static_cast(multiviewPerViewAttributesProperties.perViewPositionAllComponents) << "\n"; + vk::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & multiviewPerViewAttributesProperties = + properties2.get(); + std::cout << "\t" + << "MultiviewPerViewAttributesProperties:\n"; + std::cout << "\t\t" + << "perViewPositionAllComponents = " + << static_cast( multiviewPerViewAttributesProperties.perViewPositionAllComponents ) << "\n"; std::cout << "\n"; } - vk::PhysicalDeviceMultiviewProperties const& multiviewProperties = properties2.get(); - std::cout << "\t" << "MultiviewProperties:\n"; - std::cout << "\t\t" << "maxMultiviewInstanceIndex = " << multiviewProperties.maxMultiviewInstanceIndex << "\n"; - std::cout << "\t\t" << "maxMultiviewViewCount = " << multiviewProperties.maxMultiviewViewCount << "\n"; + vk::PhysicalDeviceMultiviewProperties const & multiviewProperties = + properties2.get(); + std::cout << "\t" + << "MultiviewProperties:\n"; + std::cout << "\t\t" + << "maxMultiviewInstanceIndex = " << multiviewProperties.maxMultiviewInstanceIndex << "\n"; + std::cout << "\t\t" + << "maxMultiviewViewCount = " << multiviewProperties.maxMultiviewViewCount << "\n"; std::cout << "\n"; - if (vk::su::contains(extensionProperties, "VK_EXT_pci_bus_info")) + if ( vk::su::contains( extensionProperties, "VK_EXT_pci_bus_info" ) ) { - vk::PhysicalDevicePCIBusInfoPropertiesEXT const& pciBusInfoProperties = properties2.get(); - std::cout << "\t" << "PCIBusInfoProperties:\n"; - std::cout << "\t\t" << "pciDomain = " << pciBusInfoProperties.pciDomain << "\n"; - std::cout << "\t\t" << "pciBus = " << pciBusInfoProperties.pciBus << "\n"; - std::cout << "\t\t" << "pciDevice = " << pciBusInfoProperties.pciDevice << "\n"; - std::cout << "\t\t" << "pciFunction = " << pciBusInfoProperties.pciFunction << "\n"; + vk::PhysicalDevicePCIBusInfoPropertiesEXT const & pciBusInfoProperties = + properties2.get(); + std::cout << "\t" + << "PCIBusInfoProperties:\n"; + std::cout << "\t\t" + << "pciDomain = " << pciBusInfoProperties.pciDomain << "\n"; + std::cout << "\t\t" + << "pciBus = " << pciBusInfoProperties.pciBus << "\n"; + std::cout << "\t\t" + << "pciDevice = " << pciBusInfoProperties.pciDevice << "\n"; + std::cout << "\t\t" + << "pciFunction = " << pciBusInfoProperties.pciFunction << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_maintenance2")) + if ( vk::su::contains( 
extensionProperties, "VK_KHR_maintenance2" ) )
 {
- vk::PhysicalDevicePointClippingProperties const& pointClippingProperties = properties2.get<vk::PhysicalDevicePointClippingProperties>();
- std::cout << "\t" << "PointClippingProperties:\n";
- std::cout << "\t\t" << "pointClippingBehavior = " << vk::to_string(pointClippingProperties.pointClippingBehavior) << "\n";
+ vk::PhysicalDevicePointClippingProperties const & pointClippingProperties =
+ properties2.get<vk::PhysicalDevicePointClippingProperties>();
+ std::cout << "\t"
+ << "PointClippingProperties:\n";
+ std::cout << "\t\t"
+ << "pointClippingBehavior = " << vk::to_string( pointClippingProperties.pointClippingBehavior )
+ << "\n";
 std::cout << "\n";
 }
- vk::PhysicalDeviceProtectedMemoryProperties const& protectedMemoryProperties = properties2.get<vk::PhysicalDeviceProtectedMemoryProperties>();
- std::cout << "\t" << "ProtectedMemoryProperties:\n";
- std::cout << "\t\t" << "protectedNoFault = " << static_cast<bool>(protectedMemoryProperties.protectedNoFault) << "\n";
+ vk::PhysicalDeviceProtectedMemoryProperties const & protectedMemoryProperties =
+ properties2.get<vk::PhysicalDeviceProtectedMemoryProperties>();
+ std::cout << "\t"
+ << "ProtectedMemoryProperties:\n";
+ std::cout << "\t\t"
+ << "protectedNoFault = " << static_cast<bool>( protectedMemoryProperties.protectedNoFault ) << "\n";
 std::cout << "\n";
- if (vk::su::contains(extensionProperties, "VK_KHR_push_descriptor"))
+ if ( vk::su::contains( extensionProperties, "VK_KHR_push_descriptor" ) )
 {
- vk::PhysicalDevicePushDescriptorPropertiesKHR const& pushDescriptorProperties = properties2.get<vk::PhysicalDevicePushDescriptorPropertiesKHR>();
- std::cout << "\t" << "PushDescriptorProperties:\n";
- std::cout << "\t\t" << "maxPushDescriptors = " << pushDescriptorProperties.maxPushDescriptors << "\n";
+ vk::PhysicalDevicePushDescriptorPropertiesKHR const & pushDescriptorProperties =
+ properties2.get<vk::PhysicalDevicePushDescriptorPropertiesKHR>();
+ std::cout << "\t"
+ << "PushDescriptorProperties:\n";
+ std::cout << "\t\t"
+ << "maxPushDescriptors = " << pushDescriptorProperties.maxPushDescriptors << "\n";
 std::cout << "\n";
 }
- if (vk::su::contains(extensionProperties, "VK_NV_ray_tracing"))
+ if ( vk::su::contains( extensionProperties, "VK_NV_ray_tracing" ) )
 {
- vk::PhysicalDeviceRayTracingPropertiesNV const& rayTracingProperties = properties2.get<vk::PhysicalDeviceRayTracingPropertiesNV>();
- std::cout << "\t" << "RayTracingProperties:\n";
- std::cout << "\t\t" << "maxDescriptorSetAccelerationStructures = " << rayTracingProperties.maxDescriptorSetAccelerationStructures << "\n";
- std::cout << "\t\t" << "maxGeometryCount = " << rayTracingProperties.maxGeometryCount << "\n";
- std::cout << "\t\t" << "maxInstanceCount = " << rayTracingProperties.maxInstanceCount << "\n";
- std::cout << "\t\t" << "maxRecursionDepth = " << rayTracingProperties.maxRecursionDepth << "\n";
- std::cout << "\t\t" << "maxShaderGroupStride = " << rayTracingProperties.maxShaderGroupStride << "\n";
- std::cout << "\t\t" << "maxTriangleCount = " << rayTracingProperties.maxTriangleCount << "\n";
- std::cout << "\t\t" << "shaderGroupBaseAlignment = " << rayTracingProperties.shaderGroupBaseAlignment << "\n";
- std::cout << "\t\t" << "shaderGroupHandleSize = " << rayTracingProperties.shaderGroupHandleSize << "\n";
+ vk::PhysicalDeviceRayTracingPropertiesNV const & rayTracingProperties =
+ properties2.get<vk::PhysicalDeviceRayTracingPropertiesNV>();
+ std::cout << "\t"
+ << "RayTracingProperties:\n";
+ std::cout << "\t\t"
+ << "maxDescriptorSetAccelerationStructures = "
+ << rayTracingProperties.maxDescriptorSetAccelerationStructures << "\n";
+ std::cout << "\t\t"
+ << "maxGeometryCount = " << rayTracingProperties.maxGeometryCount << "\n";
+ std::cout << "\t\t"
+ << "maxInstanceCount = " << rayTracingProperties.maxInstanceCount << "\n";
+ std::cout << "\t\t"
+ <<
"maxRecursionDepth = " << rayTracingProperties.maxRecursionDepth << "\n"; + std::cout << "\t\t" + << "maxShaderGroupStride = " << rayTracingProperties.maxShaderGroupStride << "\n"; + std::cout << "\t\t" + << "maxTriangleCount = " << rayTracingProperties.maxTriangleCount << "\n"; + std::cout << "\t\t" + << "shaderGroupBaseAlignment = " << rayTracingProperties.shaderGroupBaseAlignment + << "\n"; + std::cout << "\t\t" + << "shaderGroupHandleSize = " << rayTracingProperties.shaderGroupHandleSize << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_sample_locations")) + if ( vk::su::contains( extensionProperties, "VK_EXT_sample_locations" ) ) { - vk::PhysicalDeviceSampleLocationsPropertiesEXT const& sampleLocationProperties = properties2.get(); - std::cout << "\t" << "SampleLocationProperties:\n"; - std::cout << "\t\t" << "maxSampleLocationGridSize = " << sampleLocationProperties.maxSampleLocationGridSize.width << " x " << sampleLocationProperties.maxSampleLocationGridSize.height << "\n"; - std::cout << "\t\t" << "sampleLocationCoordinateRange = " << "[" << sampleLocationProperties.sampleLocationCoordinateRange[0] << ", " << sampleLocationProperties.sampleLocationCoordinateRange[1] << "]" << "\n"; - std::cout << "\t\t" << "sampleLocationSampleCounts = " << vk::to_string(sampleLocationProperties.sampleLocationSampleCounts) << "\n"; - std::cout << "\t\t" << "sampleLocationSubPixelBits = " << sampleLocationProperties.sampleLocationSubPixelBits << "\n"; - std::cout << "\t\t" << "variableSampleLocations = " << static_cast(sampleLocationProperties.variableSampleLocations) << "\n"; + vk::PhysicalDeviceSampleLocationsPropertiesEXT const & sampleLocationProperties = + properties2.get(); + std::cout << "\t" + << "SampleLocationProperties:\n"; + std::cout << "\t\t" + << "maxSampleLocationGridSize = " << sampleLocationProperties.maxSampleLocationGridSize.width + << " x " << sampleLocationProperties.maxSampleLocationGridSize.height << "\n"; + std::cout << "\t\t" + << "sampleLocationCoordinateRange = " + << "[" << sampleLocationProperties.sampleLocationCoordinateRange[0] << ", " + << sampleLocationProperties.sampleLocationCoordinateRange[1] << "]" + << "\n"; + std::cout << "\t\t" + << "sampleLocationSampleCounts = " + << vk::to_string( sampleLocationProperties.sampleLocationSampleCounts ) << "\n"; + std::cout << "\t\t" + << "sampleLocationSubPixelBits = " << sampleLocationProperties.sampleLocationSubPixelBits << "\n"; + std::cout << "\t\t" + << "variableSampleLocations = " + << static_cast( sampleLocationProperties.variableSampleLocations ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_sampler_filter_minmax")) + if ( vk::su::contains( extensionProperties, "VK_EXT_sampler_filter_minmax" ) ) { - vk::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& samplerFilterMinmaxProperties = properties2.get(); - std::cout << "\t" << "SamplerFilterMinmaxProperties:\n"; - std::cout << "\t\t" << "filterMinmaxImageComponentMapping = " << static_cast(samplerFilterMinmaxProperties.filterMinmaxImageComponentMapping) << "\n"; - std::cout << "\t\t" << "filterMinmaxSingleComponentFormats = " << static_cast(samplerFilterMinmaxProperties.filterMinmaxSingleComponentFormats) << "\n"; + vk::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & samplerFilterMinmaxProperties = + properties2.get(); + std::cout << "\t" + << "SamplerFilterMinmaxProperties:\n"; + std::cout << "\t\t" + << "filterMinmaxImageComponentMapping = " + << static_cast( 
samplerFilterMinmaxProperties.filterMinmaxImageComponentMapping ) << "\n"; + std::cout << "\t\t" + << "filterMinmaxSingleComponentFormats = " + << static_cast( samplerFilterMinmaxProperties.filterMinmaxSingleComponentFormats ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_AMD_shader_core_properties2")) + if ( vk::su::contains( extensionProperties, "VK_AMD_shader_core_properties2" ) ) { - vk::PhysicalDeviceShaderCoreProperties2AMD const& shaderCoreProperties2 = properties2.get(); - std::cout << "\t" << "ShaderCoreProperties2:\n"; - std::cout << "\t\t" << "activeComputeUnitCount = " << shaderCoreProperties2.activeComputeUnitCount << "\n"; - std::cout << "\t\t" << "shaderCoreFeatures = " << vk::to_string(shaderCoreProperties2.shaderCoreFeatures) << "\n"; + vk::PhysicalDeviceShaderCoreProperties2AMD const & shaderCoreProperties2 = + properties2.get(); + std::cout << "\t" + << "ShaderCoreProperties2:\n"; + std::cout << "\t\t" + << "activeComputeUnitCount = " << shaderCoreProperties2.activeComputeUnitCount << "\n"; + std::cout << "\t\t" + << "shaderCoreFeatures = " << vk::to_string( shaderCoreProperties2.shaderCoreFeatures ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_AMD_shader_core_properties2")) + if ( vk::su::contains( extensionProperties, "VK_AMD_shader_core_properties2" ) ) { - vk::PhysicalDeviceShaderCorePropertiesAMD const& shaderCoreProperties = properties2.get(); - std::cout << "\t" << "ShaderCoreProperties:\n"; - std::cout << "\t\t" << "computeUnitsPerShaderArray = " << shaderCoreProperties.computeUnitsPerShaderArray << "\n"; - std::cout << "\t\t" << "maxSgprAllocation = " << shaderCoreProperties.maxSgprAllocation << "\n"; - std::cout << "\t\t" << "maxVgprAllocation = " << shaderCoreProperties.maxVgprAllocation << "\n"; - std::cout << "\t\t" << "minSgprAllocation = " << shaderCoreProperties.minSgprAllocation << "\n"; - std::cout << "\t\t" << "minVgprAllocation = " << shaderCoreProperties.minVgprAllocation << "\n"; - std::cout << "\t\t" << "sgprAllocationGranularity = " << shaderCoreProperties.sgprAllocationGranularity << "\n"; - std::cout << "\t\t" << "sgprsPerSimd = " << shaderCoreProperties.sgprsPerSimd << "\n"; - std::cout << "\t\t" << "shaderArraysPerEngineCount = " << shaderCoreProperties.shaderArraysPerEngineCount << "\n"; - std::cout << "\t\t" << "shaderEngineCount = " << shaderCoreProperties.shaderEngineCount << "\n"; - std::cout << "\t\t" << "simdPerComputeUnit = " << shaderCoreProperties.simdPerComputeUnit << "\n"; - std::cout << "\t\t" << "vgprAllocationGranularity = " << shaderCoreProperties.vgprAllocationGranularity << "\n"; - std::cout << "\t\t" << "vgprsPerSimd = " << shaderCoreProperties.vgprsPerSimd << "\n"; - std::cout << "\t\t" << "wavefrontSize = " << shaderCoreProperties.wavefrontSize << "\n"; - std::cout << "\t\t" << "wavefrontsPerSimd = " << shaderCoreProperties.wavefrontsPerSimd << "\n"; + vk::PhysicalDeviceShaderCorePropertiesAMD const & shaderCoreProperties = + properties2.get(); + std::cout << "\t" + << "ShaderCoreProperties:\n"; + std::cout << "\t\t" + << "computeUnitsPerShaderArray = " << shaderCoreProperties.computeUnitsPerShaderArray << "\n"; + std::cout << "\t\t" + << "maxSgprAllocation = " << shaderCoreProperties.maxSgprAllocation << "\n"; + std::cout << "\t\t" + << "maxVgprAllocation = " << shaderCoreProperties.maxVgprAllocation << "\n"; + std::cout << "\t\t" + << "minSgprAllocation = " << shaderCoreProperties.minSgprAllocation << "\n"; + std::cout << "\t\t" + << "minVgprAllocation = " 
<< shaderCoreProperties.minVgprAllocation << "\n"; + std::cout << "\t\t" + << "sgprAllocationGranularity = " << shaderCoreProperties.sgprAllocationGranularity << "\n"; + std::cout << "\t\t" + << "sgprsPerSimd = " << shaderCoreProperties.sgprsPerSimd << "\n"; + std::cout << "\t\t" + << "shaderArraysPerEngineCount = " << shaderCoreProperties.shaderArraysPerEngineCount << "\n"; + std::cout << "\t\t" + << "shaderEngineCount = " << shaderCoreProperties.shaderEngineCount << "\n"; + std::cout << "\t\t" + << "simdPerComputeUnit = " << shaderCoreProperties.simdPerComputeUnit << "\n"; + std::cout << "\t\t" + << "vgprAllocationGranularity = " << shaderCoreProperties.vgprAllocationGranularity << "\n"; + std::cout << "\t\t" + << "vgprsPerSimd = " << shaderCoreProperties.vgprsPerSimd << "\n"; + std::cout << "\t\t" + << "wavefrontSize = " << shaderCoreProperties.wavefrontSize << "\n"; + std::cout << "\t\t" + << "wavefrontsPerSimd = " << shaderCoreProperties.wavefrontsPerSimd << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NV_shader_sm_builtins")) + if ( vk::su::contains( extensionProperties, "VK_NV_shader_sm_builtins" ) ) { - vk::PhysicalDeviceShaderSMBuiltinsPropertiesNV const& shaderSMBuiltinsProperties = properties2.get(); - std::cout << "\t" << "ShaderSMBuiltinsProperties:\n"; - std::cout << "\t\t" << "shaderSMCount = " << shaderSMBuiltinsProperties.shaderSMCount << "\n"; - std::cout << "\t\t" << "shaderWarpsPerSM = " << shaderSMBuiltinsProperties.shaderWarpsPerSM << "\n"; + vk::PhysicalDeviceShaderSMBuiltinsPropertiesNV const & shaderSMBuiltinsProperties = + properties2.get(); + std::cout << "\t" + << "ShaderSMBuiltinsProperties:\n"; + std::cout << "\t\t" + << "shaderSMCount = " << shaderSMBuiltinsProperties.shaderSMCount << "\n"; + std::cout << "\t\t" + << "shaderWarpsPerSM = " << shaderSMBuiltinsProperties.shaderWarpsPerSM << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_NV_shading_rate_image")) + if ( vk::su::contains( extensionProperties, "VK_NV_shading_rate_image" ) ) { - vk::PhysicalDeviceShadingRateImagePropertiesNV const& shadingRageImageProperties = properties2.get(); - std::cout << "\t" << "ShadingRateImageProperties:\n"; - std::cout << "\t\t" << "shadingRateMaxCoarseSamples = " << shadingRageImageProperties.shadingRateMaxCoarseSamples << "\n"; - std::cout << "\t\t" << "shadingRatePaletteSize = " << shadingRageImageProperties.shadingRatePaletteSize << "\n"; - std::cout << "\t\t" << "shadingRatePaletteSize = " << "[" << shadingRageImageProperties.shadingRateTexelSize.width << " x " << shadingRageImageProperties.shadingRateTexelSize.height << "]" << "\n"; + vk::PhysicalDeviceShadingRateImagePropertiesNV const & shadingRageImageProperties = + properties2.get(); + std::cout << "\t" + << "ShadingRateImageProperties:\n"; + std::cout << "\t\t" + << "shadingRateMaxCoarseSamples = " << shadingRageImageProperties.shadingRateMaxCoarseSamples << "\n"; + std::cout << "\t\t" + << "shadingRatePaletteSize = " << shadingRageImageProperties.shadingRatePaletteSize << "\n"; + std::cout << "\t\t" + << "shadingRatePaletteSize = " + << "[" << shadingRageImageProperties.shadingRateTexelSize.width << " x " + << shadingRageImageProperties.shadingRateTexelSize.height << "]" + << "\n"; std::cout << "\n"; } - vk::PhysicalDeviceSubgroupProperties const& subgroupProperties = properties2.get(); - std::cout << "\t" << "SubgroupProperties:\n"; - std::cout << "\t\t" << "quadOperationsInAllStages = " << static_cast(subgroupProperties.quadOperationsInAllStages) << 
"\n"; - std::cout << "\t\t" << "subgroupSize = " << subgroupProperties.subgroupSize << "\n"; - std::cout << "\t\t" << "supportedOperations = " << vk::to_string(subgroupProperties.supportedOperations) << "\n"; - std::cout << "\t\t" << "supportedStages = " << vk::to_string(subgroupProperties.supportedStages) << "\n"; + vk::PhysicalDeviceSubgroupProperties const & subgroupProperties = + properties2.get(); + std::cout << "\t" + << "SubgroupProperties:\n"; + std::cout << "\t\t" + << "quadOperationsInAllStages = " << static_cast( subgroupProperties.quadOperationsInAllStages ) + << "\n"; + std::cout << "\t\t" + << "subgroupSize = " << subgroupProperties.subgroupSize << "\n"; + std::cout << "\t\t" + << "supportedOperations = " << vk::to_string( subgroupProperties.supportedOperations ) << "\n"; + std::cout << "\t\t" + << "supportedStages = " << vk::to_string( subgroupProperties.supportedStages ) << "\n"; std::cout << "\n"; - if (vk::su::contains(extensionProperties, "VK_EXT_subgroup_size_control")) + if ( vk::su::contains( extensionProperties, "VK_EXT_subgroup_size_control" ) ) { - vk::PhysicalDeviceSubgroupSizeControlPropertiesEXT const& subgroupSizeControlProperties = properties2.get(); - std::cout << "\t" << "SubgroupSizeControlProperties:\n"; - std::cout << "\t\t" << "maxComputeWorkgroupSubgroups = " << subgroupSizeControlProperties.maxComputeWorkgroupSubgroups << "\n"; - std::cout << "\t\t" << "maxSubgroupSize = " << subgroupSizeControlProperties.maxSubgroupSize << "\n"; - std::cout << "\t\t" << "minSubgroupSize = " << subgroupSizeControlProperties.minSubgroupSize << "\n"; - std::cout << "\t\t" << "requiredSubgroupSizeStages = " << vk::to_string(subgroupSizeControlProperties.requiredSubgroupSizeStages) << "\n"; + vk::PhysicalDeviceSubgroupSizeControlPropertiesEXT const & subgroupSizeControlProperties = + properties2.get(); + std::cout << "\t" + << "SubgroupSizeControlProperties:\n"; + std::cout << "\t\t" + << "maxComputeWorkgroupSubgroups = " << subgroupSizeControlProperties.maxComputeWorkgroupSubgroups + << "\n"; + std::cout << "\t\t" + << "maxSubgroupSize = " << subgroupSizeControlProperties.maxSubgroupSize << "\n"; + std::cout << "\t\t" + << "minSubgroupSize = " << subgroupSizeControlProperties.minSubgroupSize << "\n"; + std::cout << "\t\t" + << "requiredSubgroupSizeStages = " + << vk::to_string( subgroupSizeControlProperties.requiredSubgroupSizeStages ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_timeline_semaphore")) + if ( vk::su::contains( extensionProperties, "VK_KHR_timeline_semaphore" ) ) { - vk::PhysicalDeviceTimelineSemaphorePropertiesKHR const& timelineSemaphoreProperties = properties2.get(); - std::cout << "\t" << "TimelineSemaphoreProperties:\n"; - std::cout << "\t\t" << "maxTimelineSemaphoreValueDifference = " << timelineSemaphoreProperties.maxTimelineSemaphoreValueDifference << "\n"; + vk::PhysicalDeviceTimelineSemaphorePropertiesKHR const & timelineSemaphoreProperties = + properties2.get(); + std::cout << "\t" + << "TimelineSemaphoreProperties:\n"; + std::cout << "\t\t" + << "maxTimelineSemaphoreValueDifference = " + << timelineSemaphoreProperties.maxTimelineSemaphoreValueDifference << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_texel_buffer_alignment")) + if ( vk::su::contains( extensionProperties, "VK_EXT_texel_buffer_alignment" ) ) { - vk::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& texelBufferAlignmentProperties = properties2.get(); - std::cout << "\t" << 
"TexelBufferAlignmentProperties:\n"; - std::cout << "\t\t" << "storageTexelBufferOffsetAlignmentBytes = " << texelBufferAlignmentProperties.storageTexelBufferOffsetAlignmentBytes << "\n"; - std::cout << "\t\t" << "storageTexelBufferOffsetSingleTexelAlignment = " << static_cast(texelBufferAlignmentProperties.storageTexelBufferOffsetSingleTexelAlignment) << "\n"; - std::cout << "\t\t" << "uniformTexelBufferOffsetAlignmentBytes = " << texelBufferAlignmentProperties.uniformTexelBufferOffsetAlignmentBytes << "\n"; - std::cout << "\t\t" << "uniformTexelBufferOffsetSingleTexelAlignment = " << static_cast(texelBufferAlignmentProperties.uniformTexelBufferOffsetSingleTexelAlignment) << "\n"; + vk::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & texelBufferAlignmentProperties = + properties2.get(); + std::cout << "\t" + << "TexelBufferAlignmentProperties:\n"; + std::cout << "\t\t" + << "storageTexelBufferOffsetAlignmentBytes = " + << texelBufferAlignmentProperties.storageTexelBufferOffsetAlignmentBytes << "\n"; + std::cout << "\t\t" + << "storageTexelBufferOffsetSingleTexelAlignment = " + << static_cast( texelBufferAlignmentProperties.storageTexelBufferOffsetSingleTexelAlignment ) + << "\n"; + std::cout << "\t\t" + << "uniformTexelBufferOffsetAlignmentBytes = " + << texelBufferAlignmentProperties.uniformTexelBufferOffsetAlignmentBytes << "\n"; + std::cout << "\t\t" + << "uniformTexelBufferOffsetSingleTexelAlignment = " + << static_cast( texelBufferAlignmentProperties.uniformTexelBufferOffsetSingleTexelAlignment ) + << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_transform_feedback")) + if ( vk::su::contains( extensionProperties, "VK_EXT_transform_feedback" ) ) { - vk::PhysicalDeviceTransformFeedbackPropertiesEXT const& transformFeedbackProperties = properties2.get(); - std::cout << "\t" << "TransformFeedbackProperties:\n"; - std::cout << "\t\t" << "maxTransformFeedbackBufferDataSize = " << transformFeedbackProperties.maxTransformFeedbackBufferDataSize << "\n"; - std::cout << "\t\t" << "maxTransformFeedbackBufferDataStride = " << transformFeedbackProperties.maxTransformFeedbackBufferDataStride << "\n"; - std::cout << "\t\t" << "maxTransformFeedbackBuffers = " << transformFeedbackProperties.maxTransformFeedbackBuffers << "\n"; - std::cout << "\t\t" << "maxTransformFeedbackBufferSize = " << transformFeedbackProperties.maxTransformFeedbackBufferSize << "\n"; - std::cout << "\t\t" << "maxTransformFeedbackStreamDataSize = " << transformFeedbackProperties.maxTransformFeedbackStreamDataSize << "\n"; - std::cout << "\t\t" << "maxTransformFeedbackStreams = " << transformFeedbackProperties.maxTransformFeedbackStreams << "\n"; - std::cout << "\t\t" << "transformFeedbackDraw = " << static_cast(transformFeedbackProperties.transformFeedbackDraw) << "\n"; - std::cout << "\t\t" << "transformFeedbackQueries = " << static_cast(transformFeedbackProperties.transformFeedbackQueries) << "\n"; - std::cout << "\t\t" << "transformFeedbackRasterizationStreamSelect = " << static_cast(transformFeedbackProperties.transformFeedbackRasterizationStreamSelect) << "\n"; - std::cout << "\t\t" << "transformFeedbackStreamsLinesTriangles = " << static_cast(transformFeedbackProperties.transformFeedbackStreamsLinesTriangles) << "\n"; + vk::PhysicalDeviceTransformFeedbackPropertiesEXT const & transformFeedbackProperties = + properties2.get(); + std::cout << "\t" + << "TransformFeedbackProperties:\n"; + std::cout << "\t\t" + << "maxTransformFeedbackBufferDataSize = " + << 
transformFeedbackProperties.maxTransformFeedbackBufferDataSize << "\n"; + std::cout << "\t\t" + << "maxTransformFeedbackBufferDataStride = " + << transformFeedbackProperties.maxTransformFeedbackBufferDataStride << "\n"; + std::cout << "\t\t" + << "maxTransformFeedbackBuffers = " + << transformFeedbackProperties.maxTransformFeedbackBuffers << "\n"; + std::cout << "\t\t" + << "maxTransformFeedbackBufferSize = " + << transformFeedbackProperties.maxTransformFeedbackBufferSize << "\n"; + std::cout << "\t\t" + << "maxTransformFeedbackStreamDataSize = " + << transformFeedbackProperties.maxTransformFeedbackStreamDataSize << "\n"; + std::cout << "\t\t" + << "maxTransformFeedbackStreams = " + << transformFeedbackProperties.maxTransformFeedbackStreams << "\n"; + std::cout << "\t\t" + << "transformFeedbackDraw = " + << static_cast( transformFeedbackProperties.transformFeedbackDraw ) << "\n"; + std::cout << "\t\t" + << "transformFeedbackQueries = " + << static_cast( transformFeedbackProperties.transformFeedbackQueries ) << "\n"; + std::cout << "\t\t" + << "transformFeedbackRasterizationStreamSelect = " + << static_cast( transformFeedbackProperties.transformFeedbackRasterizationStreamSelect ) + << "\n"; + std::cout << "\t\t" + << "transformFeedbackStreamsLinesTriangles = " + << static_cast( transformFeedbackProperties.transformFeedbackStreamsLinesTriangles ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_vertex_attribute_divisor")) + if ( vk::su::contains( extensionProperties, "VK_EXT_vertex_attribute_divisor" ) ) { - vk::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& vertexAttributeDivisorProperties = properties2.get(); - std::cout << "\t" << "VertexAttributeDivisorProperties:\n"; - std::cout << "\t\t" << "maxVertexAttribDivisor = " << vertexAttributeDivisorProperties.maxVertexAttribDivisor << "\n"; + vk::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & vertexAttributeDivisorProperties = + properties2.get(); + std::cout << "\t" + << "VertexAttributeDivisorProperties:\n"; + std::cout << "\t\t" + << "maxVertexAttribDivisor = " << vertexAttributeDivisorProperties.maxVertexAttribDivisor << "\n"; std::cout << "\n"; } } /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/PhysicalDeviceQueueFamilyProperties/PhysicalDeviceQueueFamilyProperties.cpp b/samples/PhysicalDeviceQueueFamilyProperties/PhysicalDeviceQueueFamilyProperties.cpp index 68e470f..64810dd 100644 --- a/samples/PhysicalDeviceQueueFamilyProperties/PhysicalDeviceQueueFamilyProperties.cpp +++ b/samples/PhysicalDeviceQueueFamilyProperties/PhysicalDeviceQueueFamilyProperties.cpp @@ -17,20 +17,21 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include #include #include -static char const* AppName = "PhysicalDeviceQueueFamilyProperties"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "PhysicalDeviceQueueFamilyProperties"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif // enumerate the physicalDevices @@ -39,32 +40,47 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_KEY_START */ std::cout << std::boolalpha; - for (size_t i=0 ; i extensionProperties = physicalDevices[i].enumerateDeviceExtensionProperties(); + std::vector extensionProperties = + physicalDevices[i].enumerateDeviceExtensionProperties(); std::cout << "PhysicalDevice " << i << "\n"; // need to explicitly specify all the template arguments for getQueueFamilyProperties2 to make the compiler happy using Chain = vk::StructureChain; - auto queueFamilyProperties2 = physicalDevices[i].getQueueFamilyProperties2, vk::DispatchLoaderDynamic>(); - for (size_t j = 0; j < queueFamilyProperties2.size(); j++) + auto queueFamilyProperties2 = + physicalDevices[i].getQueueFamilyProperties2, vk::DispatchLoaderDynamic>(); + for ( size_t j = 0; j < queueFamilyProperties2.size(); j++ ) { - std::cout << "\t" << "QueueFamily " << j << "\n"; - vk::QueueFamilyProperties const& properties = queueFamilyProperties2[j].get().queueFamilyProperties; - std::cout << "\t\t" << "QueueFamilyProperties:\n"; - std::cout << "\t\t\t" << "queueFlags = " << vk::to_string(properties.queueFlags) << "\n"; - std::cout << "\t\t\t" << "queueCount = " << properties.queueCount << "\n"; - std::cout << "\t\t\t" << "timestampValidBits = " << properties.timestampValidBits << "\n"; - std::cout << "\t\t\t" << "minImageTransferGranularity = " << properties.minImageTransferGranularity.width << " x " << properties.minImageTransferGranularity.height << " x " << properties.minImageTransferGranularity.depth << "\n"; + std::cout << "\t" + << "QueueFamily " << j << "\n"; + vk::QueueFamilyProperties const & properties = + queueFamilyProperties2[j].get().queueFamilyProperties; + std::cout << "\t\t" + << "QueueFamilyProperties:\n"; + std::cout << "\t\t\t" + << "queueFlags = " << vk::to_string( properties.queueFlags ) << "\n"; + std::cout << "\t\t\t" + << "queueCount = " << properties.queueCount << "\n"; + std::cout << "\t\t\t" + << "timestampValidBits = " << properties.timestampValidBits << "\n"; + std::cout << "\t\t\t" + << "minImageTransferGranularity = " << properties.minImageTransferGranularity.width << " x " + << 
properties.minImageTransferGranularity.height << " x " + << properties.minImageTransferGranularity.depth << "\n"; std::cout << "\n"; - if (vk::su::contains(extensionProperties, "VK_NV_device_diagnostic_checkpoints")) + if ( vk::su::contains( extensionProperties, "VK_NV_device_diagnostic_checkpoints" ) ) { - vk::QueueFamilyCheckpointPropertiesNV const& checkpointProperties = queueFamilyProperties2[j].get(); - std::cout << "\t\t" << "CheckPointPropertiesNV:\n"; - std::cout << "\t\t\t" << "checkpointExecutionStageMask = " << vk::to_string(checkpointProperties.checkpointExecutionStageMask) << "\n"; + vk::QueueFamilyCheckpointPropertiesNV const & checkpointProperties = + queueFamilyProperties2[j].get(); + std::cout << "\t\t" + << "CheckPointPropertiesNV:\n"; + std::cout << "\t\t\t" + << "checkpointExecutionStageMask = " + << vk::to_string( checkpointProperties.checkpointExecutionStageMask ) << "\n"; std::cout << "\n"; } } @@ -72,20 +88,20 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/PipelineCache/PipelineCache.cpp b/samples/PipelineCache/PipelineCache.cpp index db56626..24d5360 100644 --- a/samples/PipelineCache/PipelineCache.cpp +++ b/samples/PipelineCache/PipelineCache.cpp @@ -19,30 +19,31 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include #include // For timestamp code (getMilliseconds) #ifdef WIN32 -#include +# include #else -#include +# include #endif typedef unsigned long long timestamp_t; -timestamp_t getMilliseconds() +timestamp_t getMilliseconds() { #ifdef WIN32 LARGE_INTEGER frequency; - BOOL useQPC = QueryPerformanceFrequency(&frequency); - if (useQPC) + BOOL useQPC = QueryPerformanceFrequency( &frequency ); + if ( useQPC ) { LARGE_INTEGER now; - QueryPerformanceCounter(&now); - return (1000LL * now.QuadPart) / frequency.QuadPart; + QueryPerformanceCounter( &now ); + return ( 1000LL * now.QuadPart ) / frequency.QuadPart; } else { @@ -50,91 +51,124 @@ timestamp_t getMilliseconds() } #else struct timeval now; - gettimeofday(&now, NULL); - return (now.tv_usec / 1000) + (timestamp_t)now.tv_sec; + gettimeofday( &now, NULL ); + return ( now.tv_usec / 1000 ) + (timestamp_t)now.tv_sec; #endif } +static char const * AppName = "PipelineCache"; +static char const * EngineName = "Vulkan.hpp"; -static char const* AppName = "PipelineCache"; -static char const* EngineName = "Vulkan.hpp"; - -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif - vk::PhysicalDevice 
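// Aside: getMilliseconds() above is deliberately platform-specific (QueryPerformanceCounter on
// Windows, gettimeofday elsewhere). Since the sample only uses it to measure elapsed time, a
// portable C++11 equivalent could be sketched with <chrono> (hypothetical helper, not part of
// the sample):
//
//   #include <chrono>
//   static timestamp_t getMillisecondsChrono()
//   {
//     using namespace std::chrono;
//     // steady_clock has an arbitrary epoch, which is fine for differences of two timestamps
//     return static_cast<timestamp_t>(
//       duration_cast<milliseconds>( steady_clock::now().time_since_epoch() ).count() );
//   }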
physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::PhysicalDeviceProperties properties = physicalDevice.getProperties(); + vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); + vk::PhysicalDeviceProperties properties = physicalDevice.getProperties(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::su::TextureData textureData(physicalDevice, device); - commandBuffer->begin(vk::CommandBufferBeginInfo()); - textureData.setImage(device, commandBuffer, vk::su::MonochromeImageGenerator({ 118, 185, 0 })); + vk::su::TextureData textureData( physicalDevice, device ); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + textureData.setImage( device, commandBuffer, vk::su::MonochromeImageGenerator( { 118, 185, 0 } ) ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(surfaceData.extent)); + vk::su::BufferData 
uniformBufferData( + physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( + device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( surfaceData.extent ) ); - vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, - { {vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex}, {vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment} }); - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout( + device, + { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex }, + { vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment } } ); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + depthBufferData.format ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent ); - vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(texturedCubeData), vk::BufferUsageFlagBits::eVertexBuffer); - vk::su::copyToDevice(device, vertexBufferData.deviceMemory, texturedCubeData, sizeof(texturedCubeData) / sizeof(texturedCubeData[0])); + vk::su::BufferData vertexBufferData( + physicalDevice, device, sizeof( texturedCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ); + vk::su::copyToDevice( device, + vertexBufferData.deviceMemory, + texturedCubeData, + sizeof( texturedCubeData ) / sizeof( texturedCubeData[0] ) ); - vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool(device, { {vk::DescriptorType::eUniformBuffer, 1}, {vk::DescriptorType::eCombinedImageSampler, 1} }); - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front()); + vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool( + device, { { vk::DescriptorType::eUniformBuffer, 1 }, { vk::DescriptorType::eCombinedImageSampler, 1 } } ); + 
vk::UniqueDescriptorSet descriptorSet = std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); - vk::su::updateDescriptorSets(device, descriptorSet, {{vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView()}}, textureData); + vk::su::updateDescriptorSets( + device, + descriptorSet, + { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView() } }, + textureData ); /* VULKAN_KEY_START */ // Check disk for existing cache data size_t startCacheSize = 0; - char *startCacheData = nullptr; + char * startCacheData = nullptr; - std::string cacheFileName = "pipeline_cache_data.bin"; - std::ifstream readCacheStream(cacheFileName, std::ios_base::in | std::ios_base::binary); - if (readCacheStream.good()) + std::string cacheFileName = "pipeline_cache_data.bin"; + std::ifstream readCacheStream( cacheFileName, std::ios_base::in | std::ios_base::binary ); + if ( readCacheStream.good() ) { // Determine cache size - readCacheStream.seekg(0, readCacheStream.end); - startCacheSize = vk::su::checked_cast(readCacheStream.tellg()); - readCacheStream.seekg(0, readCacheStream.beg); + readCacheStream.seekg( 0, readCacheStream.end ); + startCacheSize = vk::su::checked_cast( readCacheStream.tellg() ); + readCacheStream.seekg( 0, readCacheStream.beg ); // Allocate memory to hold the initial cache data - startCacheData = (char *)std::malloc(startCacheSize); + startCacheData = (char *)std::malloc( startCacheSize ); // Read the data into our buffer - readCacheStream.read(startCacheData, startCacheSize); + readCacheStream.read( startCacheData, startCacheSize ); // Clean up and print results readCacheStream.close(); @@ -147,7 +181,7 @@ int main(int /*argc*/, char ** /*argv*/) std::cout << " Pipeline cache miss!\n"; } - if (startCacheData != nullptr) + if ( startCacheData != nullptr ) { // Check for cache validity // @@ -177,136 +211,160 @@ int main(int /*argc*/, char ** /*argv*/) // as a stream of bytes, with the least significant byte first // 16 VK_UUID_SIZE a pipeline cache ID equal to VkPhysicalDeviceProperties::pipelineCacheUUID - uint32_t headerLength = 0; - uint32_t cacheHeaderVersion = 0; - uint32_t vendorID = 0; - uint32_t deviceID = 0; - uint8_t pipelineCacheUUID[VK_UUID_SIZE] = {}; + uint32_t headerLength = 0; + uint32_t cacheHeaderVersion = 0; + uint32_t vendorID = 0; + uint32_t deviceID = 0; + uint8_t pipelineCacheUUID[VK_UUID_SIZE] = {}; - memcpy(&headerLength, (uint8_t *)startCacheData + 0, 4); - memcpy(&cacheHeaderVersion, (uint8_t *)startCacheData + 4, 4); - memcpy(&vendorID, (uint8_t *)startCacheData + 8, 4); - memcpy(&deviceID, (uint8_t *)startCacheData + 12, 4); - memcpy(pipelineCacheUUID, (uint8_t *)startCacheData + 16, VK_UUID_SIZE); + memcpy( &headerLength, (uint8_t *)startCacheData + 0, 4 ); + memcpy( &cacheHeaderVersion, (uint8_t *)startCacheData + 4, 4 ); + memcpy( &vendorID, (uint8_t *)startCacheData + 8, 4 ); + memcpy( &deviceID, (uint8_t *)startCacheData + 12, 4 ); + memcpy( pipelineCacheUUID, (uint8_t *)startCacheData + 16, VK_UUID_SIZE ); // Check each field and report bad values before freeing existing cache bool badCache = false; - if (headerLength <= 0) + if ( headerLength <= 0 ) { badCache = true; std::cout << " Bad header length in " << cacheFileName << ".\n"; - std::cout << " Cache contains: " << std::hex << std::setw(8) << headerLength << "\n"; + std::cout << " Cache contains: " << std::hex << std::setw( 8 ) << headerLength << "\n"; } - if 
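// Aside: the five fields read via memcpy above correspond to the on-disk header layout described
// in the comment block (little-endian, fixed offsets). As a sketch only, they could be pictured
// as a hypothetical POD mirroring those offsets:
//
//   struct PipelineCacheHeader                    // byte offsets per the comment above
//   {
//     uint32_t headerLength;                      //  0..3
//     uint32_t cacheHeaderVersion;                //  4..7   expected VK_PIPELINE_CACHE_HEADER_VERSION_ONE
//     uint32_t vendorID;                          //  8..11  compared against properties.vendorID
//     uint32_t deviceID;                          // 12..15  compared against properties.deviceID
//     uint8_t  pipelineCacheUUID[VK_UUID_SIZE];   // 16..    compared against properties.pipelineCacheUUID
//   };
//
//   The sample keeps copying field by field, which avoids any reliance on struct padding
//   matching the serialized layout.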
(cacheHeaderVersion != VK_PIPELINE_CACHE_HEADER_VERSION_ONE) + if ( cacheHeaderVersion != VK_PIPELINE_CACHE_HEADER_VERSION_ONE ) { badCache = true; std::cout << " Unsupported cache header version in " << cacheFileName << ".\n"; - std::cout << " Cache contains: " << std::hex << std::setw(8) << cacheHeaderVersion << "\n"; + std::cout << " Cache contains: " << std::hex << std::setw( 8 ) << cacheHeaderVersion << "\n"; } - if (vendorID != properties.vendorID) + if ( vendorID != properties.vendorID ) { badCache = true; std::cout << " Vender ID mismatch in " << cacheFileName << ".\n"; - std::cout << " Cache contains: " << std::hex << std::setw(8) << vendorID << "\n"; - std::cout << " Driver expects: " << std::hex << std::setw(8) << properties.vendorID << "\n"; + std::cout << " Cache contains: " << std::hex << std::setw( 8 ) << vendorID << "\n"; + std::cout << " Driver expects: " << std::hex << std::setw( 8 ) << properties.vendorID << "\n"; } - if (deviceID != properties.deviceID) + if ( deviceID != properties.deviceID ) { badCache = true; std::cout << " Device ID mismatch in " << cacheFileName << ".\n"; - std::cout << " Cache contains: " << std::hex << std::setw(8) << deviceID << "\n"; - std::cout << " Driver expects: " << std::hex << std::setw(8) << properties.deviceID << "\n"; + std::cout << " Cache contains: " << std::hex << std::setw( 8 ) << deviceID << "\n"; + std::cout << " Driver expects: " << std::hex << std::setw( 8 ) << properties.deviceID << "\n"; } - if (memcmp(pipelineCacheUUID, properties.pipelineCacheUUID, sizeof(pipelineCacheUUID)) != 0) + if ( memcmp( pipelineCacheUUID, properties.pipelineCacheUUID, sizeof( pipelineCacheUUID ) ) != 0 ) { badCache = true; std::cout << " UUID mismatch in " << cacheFileName << ".\n"; - std::cout << " Cache contains: " << vk::su::UUID(pipelineCacheUUID) << "\n"; - std::cout << " Driver expects: " << vk::su::UUID(properties.pipelineCacheUUID) << "\n"; + std::cout << " Cache contains: " << vk::su::UUID( pipelineCacheUUID ) << "\n"; + std::cout << " Driver expects: " << vk::su::UUID( properties.pipelineCacheUUID ) << "\n"; } - if (badCache) + if ( badCache ) { // Don't submit initial cache data if any version info is incorrect - free(startCacheData); + free( startCacheData ); startCacheSize = 0; startCacheData = nullptr; // And clear out the old cache file for use in next run std::cout << " Deleting cache entry " << cacheFileName << " to repopulate.\n"; - if (remove(cacheFileName.c_str()) != 0) + if ( remove( cacheFileName.c_str() ) != 0 ) { std::cerr << "Reading error"; - exit(EXIT_FAILURE); + exit( EXIT_FAILURE ); } } } // Feed the initial cache data into cache creation - vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo(vk::PipelineCacheCreateFlags(), startCacheSize, startCacheData)); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( + vk::PipelineCacheCreateInfo( vk::PipelineCacheCreateFlags(), startCacheSize, startCacheData ) ); // Free our initialData now that pipeline cache has been created - free(startCacheData); + free( startCacheData ); startCacheData = NULL; // Time (roughly) taken to create the graphics pipeline - timestamp_t start = getMilliseconds(); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr), - sizeof(texturedCubeData[0]), { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, - 
vk::FrontFace::eClockwise, true, pipelineLayout, renderPass); + timestamp_t start = getMilliseconds(); + vk::UniquePipeline graphicsPipeline = + vk::su::createGraphicsPipeline( device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + sizeof( texturedCubeData[0] ), + { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, + vk::FrontFace::eClockwise, + true, + pipelineLayout, + renderPass ); timestamp_t elapsed = getMilliseconds() - start; std::cout << " vkCreateGraphicsPipeline time: " << (double)elapsed << " ms\n"; - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo(vk::SemaphoreCreateFlags())); + vk::UniqueSemaphore imageAcquiredSemaphore = + device->createSemaphoreUnique( vk::SemaphoreCreateInfo( vk::SemaphoreCreateFlags() ) ); // Get the index of the next available swapchain image: - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), UINT64_MAX, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::ResultValue currentBuffer = + device->acquireNextImageKHR( swapChainData.swapChain.get(), UINT64_MAX, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); vk::ClearValue clearValues[2]; - clearValues[0].color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); - commandBuffer->beginRenderPass(vk::RenderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(), surfaceData.extent), 2, clearValues), vk::SubpassContents::eInline); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get()); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), {}); + commandBuffer->beginRenderPass( vk::RenderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D(), surfaceData.extent ), + 2, + clearValues ), + vk::SubpassContents::eInline ); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), {} ); - commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0}); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast(surfaceData.extent.width), static_cast(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent)); + commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } ); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast( surfaceData.extent.width ), + static_cast( surfaceData.extent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); - commandBuffer->draw(12 * 3, 1, 0, 0); + commandBuffer->draw( 12 * 3, 1, 0, 0 ); commandBuffer->endRenderPass(); commandBuffer->end(); - vk::UniqueFence drawFence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence drawFence = 
device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - vk::SubmitInfo submitInfo(1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, drawFence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, drawFence.get() ); - while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == device->waitForFences( drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); // Store away the cache that we've populated. This could conceivably happen // earlier, depends on when the pipeline cache stops being populated // internally. - std::vector endCacheData = device->getPipelineCacheData(pipelineCache.get()); + std::vector endCacheData = device->getPipelineCacheData( pipelineCache.get() ); // Write the file to disk, overwriting whatever was there - std::ofstream writeCacheStream(cacheFileName, std::ios_base::out | std::ios_base::binary); - if (writeCacheStream.good()) + std::ofstream writeCacheStream( cacheFileName, std::ios_base::out | std::ios_base::binary ); + if ( writeCacheStream.good() ) { - writeCacheStream.write(reinterpret_cast(endCacheData.data()), endCacheData.size()); + writeCacheStream.write( reinterpret_cast( endCacheData.data() ), endCacheData.size() ); writeCacheStream.close(); std::cout << " cacheData written to " << cacheFileName << "\n"; } @@ -318,20 +376,20 @@ int main(int /*argc*/, char ** /*argv*/) /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/PipelineDerivative/PipelineDerivative.cpp b/samples/PipelineDerivative/PipelineDerivative.cpp index c281ce9..133e077 100644 --- a/samples/PipelineDerivative/PipelineDerivative.cpp +++ b/samples/PipelineDerivative/PipelineDerivative.cpp @@ -19,69 +19,104 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include -static char const* AppName = "PipelineDerivative"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "PipelineDerivative"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + 
vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::su::TextureData textureData(physicalDevice, device); - commandBuffer->begin(vk::CommandBufferBeginInfo()); - textureData.setImage(device, commandBuffer, vk::su::CheckerboardImageGenerator()); + vk::su::TextureData textureData( physicalDevice, device ); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + textureData.setImage( device, commandBuffer, vk::su::CheckerboardImageGenerator() ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(surfaceData.extent)); + vk::su::BufferData uniformBufferData( + physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( + device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( surfaceData.extent ) ); - vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, - { {vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex}, {vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment} }); - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout( + device, + { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex }, + { vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment } } ); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + depthBufferData.format ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, 
surfaceData.extent ); - vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(texturedCubeData), vk::BufferUsageFlagBits::eVertexBuffer); - vk::su::copyToDevice(device, vertexBufferData.deviceMemory, texturedCubeData, sizeof(texturedCubeData) / sizeof(texturedCubeData[0])); + vk::su::BufferData vertexBufferData( + physicalDevice, device, sizeof( texturedCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ); + vk::su::copyToDevice( device, + vertexBufferData.deviceMemory, + texturedCubeData, + sizeof( texturedCubeData ) / sizeof( texturedCubeData[0] ) ); - vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool(device, { {vk::DescriptorType::eUniformBuffer, 1}, {vk::DescriptorType::eCombinedImageSampler, 1} }); - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front()); + vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool( + device, { { vk::DescriptorType::eUniformBuffer, 1 }, { vk::DescriptorType::eCombinedImageSampler, 1 } } ); + vk::UniqueDescriptorSet descriptorSet = std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); - vk::su::updateDescriptorSets(device, descriptorSet, {{vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView()}}, textureData); + vk::su::updateDescriptorSets( + device, + descriptorSet, + { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView() } }, + textureData ); - vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() ); /* VULKAN_KEY_START */ @@ -90,43 +125,96 @@ int main(int /*argc*/, char ** /*argv*/) // First pipeline has VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT set. // Second pipeline has a modified fragment shader and sets the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag. 
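// In outline, the derivative-pipeline setup that follows reduces to this (sketch only; the full
// create-info initialization appears in the code below):
//
//   graphicsPipelineCreateInfo.flags = vk::PipelineCreateFlagBits::eAllowDerivatives;
//   vk::UniquePipeline basePipeline =
//     device->createGraphicsPipelineUnique( pipelineCache.get(), graphicsPipelineCreateInfo );
//
//   graphicsPipelineCreateInfo.flags              = vk::PipelineCreateFlagBits::eDerivative;
//   graphicsPipelineCreateInfo.basePipelineHandle = basePipeline.get();
//   graphicsPipelineCreateInfo.basePipelineIndex  = -1;   // handle is used, so no index
//   vk::UniquePipeline derivedPipeline =
//     device->createGraphicsPipelineUnique( pipelineCache.get(), graphicsPipelineCreateInfo );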
- vk::PipelineShaderStageCreateInfo pipelineShaderStageCreateInfos[2] = - { - vk::PipelineShaderStageCreateInfo(vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eVertex, vertexShaderModule.get(), "main"), - vk::PipelineShaderStageCreateInfo(vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eFragment, fragmentShaderModule.get(), "main") + vk::PipelineShaderStageCreateInfo pipelineShaderStageCreateInfos[2] = { + vk::PipelineShaderStageCreateInfo( + vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eVertex, vertexShaderModule.get(), "main" ), + vk::PipelineShaderStageCreateInfo( + vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eFragment, fragmentShaderModule.get(), "main" ) }; - vk::VertexInputBindingDescription vertexInputBindingDescription(0, sizeof(texturedCubeData[0])); - vk::VertexInputAttributeDescription vertexInputAttributeDescriptions[2] = - { - vk::VertexInputAttributeDescription(0, 0, vk::Format::eR32G32B32A32Sfloat, 0), - vk::VertexInputAttributeDescription(1, 0, vk::Format::eR32G32B32A32Sfloat, 16) + vk::VertexInputBindingDescription vertexInputBindingDescription( 0, sizeof( texturedCubeData[0] ) ); + vk::VertexInputAttributeDescription vertexInputAttributeDescriptions[2] = { + vk::VertexInputAttributeDescription( 0, 0, vk::Format::eR32G32B32A32Sfloat, 0 ), + vk::VertexInputAttributeDescription( 1, 0, vk::Format::eR32G32B32A32Sfloat, 16 ) }; - vk::PipelineVertexInputStateCreateInfo pipelineVertexInputStateCreateInfo(vk::PipelineVertexInputStateCreateFlags(), 1, &vertexInputBindingDescription, 2, vertexInputAttributeDescriptions); + vk::PipelineVertexInputStateCreateInfo pipelineVertexInputStateCreateInfo( + vk::PipelineVertexInputStateCreateFlags(), + 1, + &vertexInputBindingDescription, + 2, + vertexInputAttributeDescriptions ); - vk::PipelineInputAssemblyStateCreateInfo pipelineInputAssemblyStateCreateInfo(vk::PipelineInputAssemblyStateCreateFlags(), vk::PrimitiveTopology::eTriangleList); + vk::PipelineInputAssemblyStateCreateInfo pipelineInputAssemblyStateCreateInfo( + vk::PipelineInputAssemblyStateCreateFlags(), vk::PrimitiveTopology::eTriangleList ); - vk::PipelineViewportStateCreateInfo pipelineViewportStateCreateInfo(vk::PipelineViewportStateCreateFlags(), 1, nullptr, 1, nullptr); + vk::PipelineViewportStateCreateInfo pipelineViewportStateCreateInfo( + vk::PipelineViewportStateCreateFlags(), 1, nullptr, 1, nullptr ); - vk::PipelineRasterizationStateCreateInfo pipelineRasterizationStateCreateInfo(vk::PipelineRasterizationStateCreateFlags(), false, false, vk::PolygonMode::eFill, vk::CullModeFlagBits::eBack, vk::FrontFace::eClockwise, false, 0.0f, 0.0f, 0.0f, 1.0f); + vk::PipelineRasterizationStateCreateInfo pipelineRasterizationStateCreateInfo( + vk::PipelineRasterizationStateCreateFlags(), + false, + false, + vk::PolygonMode::eFill, + vk::CullModeFlagBits::eBack, + vk::FrontFace::eClockwise, + false, + 0.0f, + 0.0f, + 0.0f, + 1.0f ); - vk::PipelineMultisampleStateCreateInfo pipelineMultisampleStateCreateInfo({}, vk::SampleCountFlagBits::e1); + vk::PipelineMultisampleStateCreateInfo pipelineMultisampleStateCreateInfo( {}, vk::SampleCountFlagBits::e1 ); - vk::StencilOpState stencilOpState(vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::CompareOp::eAlways); - vk::PipelineDepthStencilStateCreateInfo pipelineDepthStencilStateCreateInfo(vk::PipelineDepthStencilStateCreateFlags(), true, true, vk::CompareOp::eLessOrEqual, false, false, stencilOpState, stencilOpState); + vk::StencilOpState stencilOpState( 
+ vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::CompareOp::eAlways ); + vk::PipelineDepthStencilStateCreateInfo pipelineDepthStencilStateCreateInfo( + vk::PipelineDepthStencilStateCreateFlags(), + true, + true, + vk::CompareOp::eLessOrEqual, + false, + false, + stencilOpState, + stencilOpState ); - vk::ColorComponentFlags colorComponentFlags(vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA); - vk::PipelineColorBlendAttachmentState pipelineColorBlendAttachmentState(false, vk::BlendFactor::eZero, vk::BlendFactor::eZero, vk::BlendOp::eAdd, vk::BlendFactor::eZero, vk::BlendFactor::eZero, vk::BlendOp::eAdd, colorComponentFlags); - vk::PipelineColorBlendStateCreateInfo pipelineColorBlendStateCreateInfo(vk::PipelineColorBlendStateCreateFlags(), false, vk::LogicOp::eNoOp, 1, &pipelineColorBlendAttachmentState, { { 1.0f, 1.0f, 1.0f, 1.0f } }); + vk::ColorComponentFlags colorComponentFlags( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG | + vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA ); + vk::PipelineColorBlendAttachmentState pipelineColorBlendAttachmentState( false, + vk::BlendFactor::eZero, + vk::BlendFactor::eZero, + vk::BlendOp::eAdd, + vk::BlendFactor::eZero, + vk::BlendFactor::eZero, + vk::BlendOp::eAdd, + colorComponentFlags ); + vk::PipelineColorBlendStateCreateInfo pipelineColorBlendStateCreateInfo( vk::PipelineColorBlendStateCreateFlags(), + false, + vk::LogicOp::eNoOp, + 1, + &pipelineColorBlendAttachmentState, + { { 1.0f, 1.0f, 1.0f, 1.0f } } ); - vk::DynamicState dynamicStates[2] = { vk::DynamicState::eViewport, vk::DynamicState::eScissor }; - vk::PipelineDynamicStateCreateInfo pipelineDynamicStateCreateInfo(vk::PipelineDynamicStateCreateFlags(), 2, dynamicStates); + vk::DynamicState dynamicStates[2] = { vk::DynamicState::eViewport, vk::DynamicState::eScissor }; + vk::PipelineDynamicStateCreateInfo pipelineDynamicStateCreateInfo( + vk::PipelineDynamicStateCreateFlags(), 2, dynamicStates ); - vk::GraphicsPipelineCreateInfo graphicsPipelineCreateInfo(vk::PipelineCreateFlagBits::eAllowDerivatives, 2, pipelineShaderStageCreateInfos, &pipelineVertexInputStateCreateInfo, - &pipelineInputAssemblyStateCreateInfo, nullptr, &pipelineViewportStateCreateInfo, &pipelineRasterizationStateCreateInfo, &pipelineMultisampleStateCreateInfo, - &pipelineDepthStencilStateCreateInfo, &pipelineColorBlendStateCreateInfo, &pipelineDynamicStateCreateInfo, pipelineLayout.get(), renderPass.get()); + vk::GraphicsPipelineCreateInfo graphicsPipelineCreateInfo( vk::PipelineCreateFlagBits::eAllowDerivatives, + 2, + pipelineShaderStageCreateInfos, + &pipelineVertexInputStateCreateInfo, + &pipelineInputAssemblyStateCreateInfo, + nullptr, + &pipelineViewportStateCreateInfo, + &pipelineRasterizationStateCreateInfo, + &pipelineMultisampleStateCreateInfo, + &pipelineDepthStencilStateCreateInfo, + &pipelineColorBlendStateCreateInfo, + &pipelineDynamicStateCreateInfo, + pipelineLayout.get(), + renderPass.get() ); - vk::UniquePipeline basePipeline = device->createGraphicsPipelineUnique(pipelineCache.get(), graphicsPipelineCreateInfo); + vk::UniquePipeline basePipeline = + device->createGraphicsPipelineUnique( pipelineCache.get(), graphicsPipelineCreateInfo ); // Now create the derivative pipeline, using a different fragment shader // This shader will shade the cube faces with interpolated colors @@ -145,69 +233,87 @@ void main() // Convert GLSL to SPIR-V glslang::InitializeProcess(); - 
vk::UniqueShaderModule fragmentShaderModule2 = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C_2); + vk::UniqueShaderModule fragmentShaderModule2 = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C_2 ); glslang::FinalizeProcess(); // Modify pipeline info to reflect derivation - pipelineShaderStageCreateInfos[1] = vk::PipelineShaderStageCreateInfo(vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eFragment, fragmentShaderModule2.get(), "main"); - graphicsPipelineCreateInfo.flags = vk::PipelineCreateFlagBits::eDerivative; + pipelineShaderStageCreateInfos[1] = vk::PipelineShaderStageCreateInfo( + vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eFragment, fragmentShaderModule2.get(), "main" ); + graphicsPipelineCreateInfo.flags = vk::PipelineCreateFlagBits::eDerivative; graphicsPipelineCreateInfo.basePipelineHandle = basePipeline.get(); - graphicsPipelineCreateInfo.basePipelineIndex = -1; + graphicsPipelineCreateInfo.basePipelineIndex = -1; // And create the derived pipeline - vk::UniquePipeline derivedPipeline = device->createGraphicsPipelineUnique(pipelineCache.get(), graphicsPipelineCreateInfo); + vk::UniquePipeline derivedPipeline = + device->createGraphicsPipelineUnique( pipelineCache.get(), graphicsPipelineCreateInfo ); /* VULKAN_KEY_END */ - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo(vk::SemaphoreCreateFlags())); + vk::UniqueSemaphore imageAcquiredSemaphore = + device->createSemaphoreUnique( vk::SemaphoreCreateInfo( vk::SemaphoreCreateFlags() ) ); // Get the index of the next available swapchain image - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), UINT64_MAX, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::ResultValue currentBuffer = + device->acquireNextImageKHR( swapChainData.swapChain.get(), UINT64_MAX, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); vk::ClearValue clearValues[2]; - clearValues[0].color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); - commandBuffer->beginRenderPass(vk::RenderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(), surfaceData.extent), 2, clearValues), vk::SubpassContents::eInline); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, derivedPipeline.get()); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), {}); + commandBuffer->beginRenderPass( vk::RenderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D(), surfaceData.extent ), + 2, + clearValues ), + vk::SubpassContents::eInline ); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, derivedPipeline.get() ); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), {} ); - commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0}); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 
0.0f, static_cast(surfaceData.extent.width), static_cast(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent)); + commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } ); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast( surfaceData.extent.width ), + static_cast( surfaceData.extent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); - commandBuffer->draw(12 * 3, 1, 0, 0); + commandBuffer->draw( 12 * 3, 1, 0, 0 ); commandBuffer->endRenderPass(); commandBuffer->end(); - vk::UniqueFence drawFence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence drawFence = device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - vk::SubmitInfo submitInfo(1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, drawFence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, drawFence.get() ); - while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == device->waitForFences( drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/PushConstants/PushConstants.cpp b/samples/PushConstants/PushConstants.cpp index 25b3b84..bddbbdc 100644 --- a/samples/PushConstants/PushConstants.cpp +++ b/samples/PushConstants/PushConstants.cpp @@ -19,13 +19,14 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "PushConstants"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "PushConstants"; +static char const * EngineName = "Vulkan.hpp"; const std::string fragmentShaderText = R"( #version 400 @@ -68,136 +69,186 @@ void main() } )"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + 
*surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(surfaceData.extent)); + vk::su::BufferData uniformBufferData( + physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( + device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( surfaceData.extent ) ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + depthBufferData.format ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent ); - vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(texturedCubeData), vk::BufferUsageFlagBits::eVertexBuffer); - vk::su::copyToDevice(device, vertexBufferData.deviceMemory, texturedCubeData, sizeof(texturedCubeData) / sizeof(texturedCubeData[0])); + vk::su::BufferData vertexBufferData( + physicalDevice, device, sizeof( texturedCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ); + vk::su::copyToDevice( device, + vertexBufferData.deviceMemory, + texturedCubeData, + sizeof( texturedCubeData ) / sizeof( texturedCubeData[0] ) ); // Create binding and layout for the following, matching contents of shader // binding 0 = uniform buffer (MVP) - vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, { {vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex} }); + vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout( + device, { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex } } ); /* VULKAN_KEY_START */ // Set up our push constant range, which mirrors the declaration of - vk::PushConstantRange pushConstantRanges(vk::ShaderStageFlagBits::eFragment, 0, 8); - vk::UniquePipelineLayout pipelineLayout 
= device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get(), 1, &pushConstantRanges)); + vk::PushConstantRange pushConstantRanges( vk::ShaderStageFlagBits::eFragment, 0, 8 ); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( vk::PipelineLayoutCreateInfo( + vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get(), 1, &pushConstantRanges ) ); // Create a single pool to contain data for our descriptor set - vk::DescriptorPoolSize poolSizes[2] = - { - vk::DescriptorPoolSize(vk::DescriptorType::eUniformBuffer, 1), - vk::DescriptorPoolSize(vk::DescriptorType::eCombinedImageSampler, 1) - }; - vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique(vk::DescriptorPoolCreateInfo(vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, 2, poolSizes)); + vk::DescriptorPoolSize poolSizes[2] = { vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 1 ), + vk::DescriptorPoolSize( vk::DescriptorType::eCombinedImageSampler, 1 ) }; + vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique( + vk::DescriptorPoolCreateInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, 2, poolSizes ) ); // Populate descriptor sets - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front()); + vk::UniqueDescriptorSet descriptorSet = std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); // Populate with info about our uniform buffer for MVP - vk::DescriptorBufferInfo bufferInfo(uniformBufferData.buffer.get(), 0, sizeof(glm::mat4x4)); - device->updateDescriptorSets(vk::WriteDescriptorSet(*descriptorSet, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo), {}); + vk::DescriptorBufferInfo bufferInfo( uniformBufferData.buffer.get(), 0, sizeof( glm::mat4x4 ) ); + device->updateDescriptorSets( + vk::WriteDescriptorSet( *descriptorSet, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo ), {} ); // Create our push constant data, which matches shader expectations std::array pushConstants = { (unsigned)2, (unsigned)0x3F800000 }; // Ensure we have enough room for push constant data - assert((sizeof(pushConstants) <= physicalDevice.getProperties().limits.maxPushConstantsSize) && "Too many push constants"); - commandBuffer->begin(vk::CommandBufferBeginInfo()); - commandBuffer->pushConstants(pipelineLayout.get(), vk::ShaderStageFlagBits::eFragment, 0, pushConstants); + assert( ( sizeof( pushConstants ) <= physicalDevice.getProperties().limits.maxPushConstantsSize ) && + "Too many push constants" ); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + commandBuffer->pushConstants( + pipelineLayout.get(), vk::ShaderStageFlagBits::eFragment, 0, pushConstants ); /* VULKAN_KEY_END */ - vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr), - sizeof(texturedCubeData[0]), { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32B32A32Sfloat, 16 } }, - vk::FrontFace::eClockwise, true, pipelineLayout, renderPass); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() 
); + vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline( + device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + sizeof( texturedCubeData[0] ), + { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32B32A32Sfloat, 16 } }, + vk::FrontFace::eClockwise, + true, + pipelineLayout, + renderPass ); - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); + vk::ResultValue currentBuffer = device->acquireNextImageKHR( + swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); vk::ClearValue clearValues[2]; - clearValues[0].color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); - vk::RenderPassBeginInfo renderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 2, clearValues); - commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eInline); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get()); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr); + vk::RenderPassBeginInfo renderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), + 2, + clearValues ); + commandBuffer->beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline ); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr ); - commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0}); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast(surfaceData.extent.width), static_cast(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent)); + commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } ); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast( surfaceData.extent.width ), + static_cast( surfaceData.extent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); - commandBuffer->draw(12 * 3, 1, 0, 0); + commandBuffer->draw( 12 * 3, 1, 0, 0 ); commandBuffer->endRenderPass(); commandBuffer->end(); - vk::UniqueFence drawFence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence drawFence = device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - vk::SubmitInfo submitInfo(1, 
&imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, drawFence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, drawFence.get() ); - while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == device->waitForFences( drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/PushDescriptors/PushDescriptors.cpp b/samples/PushDescriptors/PushDescriptors.cpp index 2d0cc0f..b9a84c2 100644 --- a/samples/PushDescriptors/PushDescriptors.cpp +++ b/samples/PushDescriptors/PushDescriptors.cpp @@ -19,164 +19,219 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "PushDescriptors"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "PushDescriptors"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { -#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1) +#if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) // initialize the DipatchLoaderDynamic to use - static vk::DynamicLoader dl; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = dl.getProcAddress("vkGetInstanceProcAddr"); - VULKAN_HPP_DEFAULT_DISPATCHER.init(vkGetInstanceProcAddr); + static vk::DynamicLoader dl; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = + dl.getProcAddress( "vkGetInstanceProcAddr" ); + VULKAN_HPP_DEFAULT_DISPATCHER.init( vkGetInstanceProcAddr ); #endif /* VULKAN_KEY_START */ // To use PUSH_DESCRIPTOR, you must also specify GET_PHYSICAL_DEVICE_PROPERTIES_2 std::vector extensionProperties = vk::enumerateInstanceExtensionProperties(); - if (std::find_if(extensionProperties.begin(), extensionProperties.end(), [](vk::ExtensionProperties ep) { return (strcmp(ep.extensionName,VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) == 0); }) == extensionProperties.end()) + if ( std::find_if( extensionProperties.begin(), extensionProperties.end(), []( vk::ExtensionProperties ep ) { + return ( strcmp( ep.extensionName, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME ) == 0 ); + } ) == extensionProperties.end() ) { std::cout << "No GET_PHYSICAL_DEVICE_PROPERTIES_2 extension" << std::endl; return 0; } std::vector instanceExtensions = vk::su::getInstanceExtensions(); - 
instanceExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME); + instanceExtensions.push_back( VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME ); - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, instanceExtensions); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, instanceExtensions ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); // Once instance is created, need to make sure the extension is available extensionProperties = physicalDevice.enumerateDeviceExtensionProperties(); - if (std::find_if(extensionProperties.begin(), extensionProperties.end(), [](vk::ExtensionProperties ep) { return (strcmp(ep.extensionName,VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME) == 0); }) == extensionProperties.end()) + if ( std::find_if( extensionProperties.begin(), extensionProperties.end(), []( vk::ExtensionProperties ep ) { + return ( strcmp( ep.extensionName, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME ) == 0 ); + } ) == extensionProperties.end() ) { std::cout << "No extension for push descriptors" << std::endl; return 0; } std::vector deviceExtensions = vk::su::getDeviceExtensions(); - deviceExtensions.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME); + deviceExtensions.push_back( VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME ); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, deviceExtensions); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, deviceExtensions ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, 
graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::su::TextureData textureData(physicalDevice, device); - commandBuffer->begin(vk::CommandBufferBeginInfo()); - textureData.setImage(device, commandBuffer, vk::su::CheckerboardImageGenerator()); + vk::su::TextureData textureData( physicalDevice, device ); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + textureData.setImage( device, commandBuffer, vk::su::CheckerboardImageGenerator() ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(surfaceData.extent)); + vk::su::BufferData uniformBufferData( + physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( + device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( surfaceData.extent ) ); // Need to specify that descriptor set layout will be for push descriptors - vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, - { {vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex}, {vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment} }, - vk::DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR); - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout( + device, + { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex }, + { vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment } }, + vk::DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + depthBufferData.format ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, 
vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent ); - vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(texturedCubeData), vk::BufferUsageFlagBits::eVertexBuffer); - vk::su::copyToDevice(device, vertexBufferData.deviceMemory, texturedCubeData, sizeof(texturedCubeData) / sizeof(texturedCubeData[0])); + vk::su::BufferData vertexBufferData( + physicalDevice, device, sizeof( texturedCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ); + vk::su::copyToDevice( device, + vertexBufferData.deviceMemory, + texturedCubeData, + sizeof( texturedCubeData ) / sizeof( texturedCubeData[0] ) ); - vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr), - sizeof(texturedCubeData[0]), { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, - vk::FrontFace::eClockwise, true, pipelineLayout, renderPass); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() ); + vk::UniquePipeline graphicsPipeline = + vk::su::createGraphicsPipeline( device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + sizeof( texturedCubeData[0] ), + { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, + vk::FrontFace::eClockwise, + true, + pipelineLayout, + renderPass ); // Get the index of the next available swapchain image: - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); + vk::ResultValue currentBuffer = device->acquireNextImageKHR( + swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); vk::ClearValue clearValues[2]; - clearValues[0].color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); - vk::RenderPassBeginInfo renderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 2, clearValues); - commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eInline); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get()); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); + vk::RenderPassBeginInfo renderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D( 0, 0 ), 
surfaceData.extent ), + 2, + clearValues ); + commandBuffer->beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline ); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); - vk::DescriptorBufferInfo bufferInfo(uniformBufferData.buffer.get(), 0, sizeof(glm::mat4x4)); - vk::DescriptorImageInfo imageInfo(textureData.textureSampler.get(), textureData.imageData->imageView.get(), vk::ImageLayout::eShaderReadOnlyOptimal); - vk::WriteDescriptorSet writeDescriptorSets[2] = - { - vk::WriteDescriptorSet({}, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo), - vk::WriteDescriptorSet({}, 1, 0, 1, vk::DescriptorType::eCombinedImageSampler, &imageInfo) + vk::DescriptorBufferInfo bufferInfo( uniformBufferData.buffer.get(), 0, sizeof( glm::mat4x4 ) ); + vk::DescriptorImageInfo imageInfo( textureData.textureSampler.get(), + textureData.imageData->imageView.get(), + vk::ImageLayout::eShaderReadOnlyOptimal ); + vk::WriteDescriptorSet writeDescriptorSets[2] = { + vk::WriteDescriptorSet( {}, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo ), + vk::WriteDescriptorSet( {}, 1, 0, 1, vk::DescriptorType::eCombinedImageSampler, &imageInfo ) }; // this call is from an extension and needs the dynamic dispatcher !! - commandBuffer->pushDescriptorSetKHR(vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, {2, writeDescriptorSets}); + commandBuffer->pushDescriptorSetKHR( + vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { 2, writeDescriptorSets } ); - commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0}); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast(surfaceData.extent.width), static_cast(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent)); + commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } ); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast( surfaceData.extent.width ), + static_cast( surfaceData.extent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); - commandBuffer->draw(12 * 3, 1, 0, 0); + commandBuffer->draw( 12 * 3, 1, 0, 0 ); commandBuffer->endRenderPass(); commandBuffer->end(); - vk::UniqueFence drawFence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence drawFence = device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - vk::SubmitInfo submitInfo(1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, drawFence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, drawFence.get() ); - while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == device->waitForFences( drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value ) ); + std::this_thread::sleep_for( 
std::chrono::milliseconds( 1000 ) ); /* VULKAN_KEY_END */ device->waitIdle(); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/RayTracing/CameraManipulator.cpp b/samples/RayTracing/CameraManipulator.cpp index 8fa3bc9..9a9952b 100644 --- a/samples/RayTracing/CameraManipulator.cpp +++ b/samples/RayTracing/CameraManipulator.cpp @@ -28,18 +28,18 @@ namespace vk // MATH functions // template - bool isZero(const T& _a) + bool isZero( const T & _a ) { - return fabs(_a) < std::numeric_limits::epsilon(); + return fabs( _a ) < std::numeric_limits::epsilon(); } template - bool isOne(const T& _a) + bool isOne( const T & _a ) { - return areEqual(_a, (T)1); + return areEqual( _a, (T)1 ); } - inline float sign(float s) + inline float sign( float s ) { - return (s < 0.f) ? -1.f : 1.f; + return ( s < 0.f ) ? -1.f : 1.f; } CameraManipulator::CameraManipulator() @@ -47,17 +47,17 @@ namespace vk update(); } - glm::vec3 const& CameraManipulator::getCameraPosition() const + glm::vec3 const & CameraManipulator::getCameraPosition() const { return m_cameraPosition; } - glm::vec3 const& CameraManipulator::getCenterPosition() const + glm::vec3 const & CameraManipulator::getCenterPosition() const { return m_centerPosition; } - glm::mat4 const& CameraManipulator::getMatrix() const + glm::mat4 const & CameraManipulator::getMatrix() const { return m_matrix; } @@ -67,7 +67,7 @@ namespace vk return m_mode; } - glm::ivec2 const& CameraManipulator::getMousePosition() const + glm::ivec2 const & CameraManipulator::getMousePosition() const { return m_mousePosition; } @@ -82,31 +82,33 @@ namespace vk return m_speed; } - glm::vec3 const& CameraManipulator::getUpVector() const + glm::vec3 const & CameraManipulator::getUpVector() const { return m_upVector; } - glm::u32vec2 const& CameraManipulator::getWindowSize() const + glm::u32vec2 const & CameraManipulator::getWindowSize() const { return m_windowSize; } - CameraManipulator::Action CameraManipulator::mouseMove(glm::ivec2 const& position, MouseButton mouseButton, ModifierFlags & modifiers) + CameraManipulator::Action + CameraManipulator::mouseMove( glm::ivec2 const & position, MouseButton mouseButton, ModifierFlags & modifiers ) { Action curAction = Action::None; - switch (mouseButton) + switch ( mouseButton ) { case MouseButton::Left: - if (((modifiers & ModifierFlagBits::Ctrl) && (modifiers & ModifierFlagBits::Shift)) || (modifiers & ModifierFlagBits::Alt)) + if ( ( ( modifiers & ModifierFlagBits::Ctrl ) && ( modifiers & ModifierFlagBits::Shift ) ) || + ( modifiers & ModifierFlagBits::Alt ) ) { curAction = m_mode == Mode::Examine ? Action::LookAround : Action::Orbit; } - else if (modifiers & ModifierFlagBits::Shift) + else if ( modifiers & ModifierFlagBits::Shift ) { curAction = Action::Dolly; } - else if (modifiers & ModifierFlagBits::Ctrl) + else if ( modifiers & ModifierFlagBits::Ctrl ) { curAction = Action::Pan; } @@ -115,89 +117,86 @@ namespace vk curAction = m_mode == Mode::Examine ? 
Action::Orbit : Action::LookAround; } break; - case MouseButton::Middle: - curAction = Action::Pan; - break; - case MouseButton::Right: - curAction = Action::Dolly; - break; - default: - assert(false); + case MouseButton::Middle: curAction = Action::Pan; break; + case MouseButton::Right: curAction = Action::Dolly; break; + default: assert( false ); } - assert(curAction != Action::None); - motion(position, curAction); + assert( curAction != Action::None ); + motion( position, curAction ); return curAction; } - void CameraManipulator::setLookat(const glm::vec3& cameraPosition, const glm::vec3& centerPosition, const glm::vec3& upVector) + void CameraManipulator::setLookat( const glm::vec3 & cameraPosition, + const glm::vec3 & centerPosition, + const glm::vec3 & upVector ) { - m_cameraPosition = cameraPosition; - m_centerPosition = centerPosition; - m_upVector = upVector; + m_cameraPosition = cameraPosition; + m_centerPosition = centerPosition; + m_upVector = upVector; update(); } - void CameraManipulator::setMode(Mode mode) + void CameraManipulator::setMode( Mode mode ) { m_mode = mode; } - void CameraManipulator::setMousePosition(glm::ivec2 const& position) + void CameraManipulator::setMousePosition( glm::ivec2 const & position ) { m_mousePosition = position; } - void CameraManipulator::setRoll(float roll) + void CameraManipulator::setRoll( float roll ) { m_roll = roll; update(); } - void CameraManipulator::setSpeed(float speed) + void CameraManipulator::setSpeed( float speed ) { m_speed = speed; } - void CameraManipulator::setWindowSize(glm::ivec2 const& size) + void CameraManipulator::setWindowSize( glm::ivec2 const & size ) { m_windowSize = size; } - void CameraManipulator::wheel(int value) + void CameraManipulator::wheel( int value ) { - float fValue = static_cast(value); - float dx = (fValue * abs(fValue)) / static_cast(m_windowSize[0]); + float fValue = static_cast( value ); + float dx = ( fValue * abs( fValue ) ) / static_cast( m_windowSize[0] ); - glm::vec3 z = m_cameraPosition - m_centerPosition; - float length = z.length() * 0.1f; - length = length < 0.001f ? 0.001f : length; + glm::vec3 z = m_cameraPosition - m_centerPosition; + float length = z.length() * 0.1f; + length = length < 0.001f ? 0.001f : length; dx *= m_speed; - dolly(glm::vec2(dx, dx)); + dolly( glm::vec2( dx, dx ) ); update(); } - void CameraManipulator::dolly(glm::vec2 const& delta) + void CameraManipulator::dolly( glm::vec2 const & delta ) { glm::vec3 z = m_centerPosition - m_cameraPosition; - float length = glm::length(z); + float length = glm::length( z ); // We are at the point of interest, and don't know any direction, so do nothing! - if(isZero(length)) + if ( isZero( length ) ) { return; } // Use the larger movement. float dd; - if(m_mode != Mode::Examine) + if ( m_mode != Mode::Examine ) { dd = -delta[1]; } else { - dd = fabs(delta[0]) > fabs(delta[1]) ? delta[0] : -delta[1]; + dd = fabs( delta[0] ) > fabs( delta[1] ) ? delta[0] : -delta[1]; } float factor = m_speed * dd / length; @@ -208,7 +207,7 @@ namespace vk factor *= length; // Don't move to or through the point of interest. - if (1.0f <= factor) + if ( 1.0f <= factor ) { return; } @@ -216,9 +215,9 @@ namespace vk z *= factor; // Not going up - if(m_mode == Mode::Walk) + if ( m_mode == Mode::Walk ) { - if(m_upVector.y > m_upVector.z) + if ( m_upVector.y > m_upVector.z ) { z.y = 0; } @@ -231,42 +230,39 @@ namespace vk m_cameraPosition += z; // In fly mode, the interest moves with us. 
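// --- Editor's note (illustrative sketch, not part of the patch) ------------------------------
// The dolly() logic in this hunk converts a mouse delta into a step along the camera-to-center
// vector, so the motion shrinks as the camera approaches the point of interest and is clamped so
// it can never reach or pass through it.  The standalone function below is a minimal restatement
// of that idea; the function name and parameters are hypothetical and only GLM is assumed.
#include <glm/glm.hpp>

glm::vec3 dollyTowards( glm::vec3 const & cameraPosition,
                        glm::vec3 const & centerPosition,
                        float             mouseDelta,
                        float             speed )
{
  glm::vec3 toCenter = centerPosition - cameraPosition;
  if ( glm::length( toCenter ) < 1.0e-6f )
  {
    return cameraPosition;  // already at the point of interest: no direction to move in
  }
  float factor = speed * mouseDelta;  // fraction of the remaining distance to cover this step
  if ( 1.0f <= factor )
  {
    return cameraPosition;  // refuse to move onto or through the point of interest
  }
  return cameraPosition + factor * toCenter;  // step length scales with the remaining distance
}
// ----------------------------------------------------------------------------------------------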
- if(m_mode != Mode::Examine) + if ( m_mode != Mode::Examine ) { m_centerPosition += z; } } - void CameraManipulator::motion(glm::ivec2 const& position, Action action) + void CameraManipulator::motion( glm::ivec2 const & position, Action action ) { - glm::vec2 delta(float(position[0] - m_mousePosition[0]) / float(m_windowSize[0]), float(position[1] - m_mousePosition[1]) / float(m_windowSize[1])); + glm::vec2 delta( float( position[0] - m_mousePosition[0] ) / float( m_windowSize[0] ), + float( position[1] - m_mousePosition[1] ) / float( m_windowSize[1] ) ); - switch(action) + switch ( action ) { case Action::Orbit: - if(m_mode == Mode::Trackball) + if ( m_mode == Mode::Trackball ) { - orbit(delta, true); // trackball(position); + orbit( delta, true ); // trackball(position); } else { - orbit(delta, false); + orbit( delta, false ); } break; - case Action::Dolly: - dolly(delta); - break; - case Action::Pan: - pan(delta); - break; + case Action::Dolly: dolly( delta ); break; + case Action::Pan: pan( delta ); break; case Action::LookAround: - if(m_mode == Mode::Trackball) + if ( m_mode == Mode::Trackball ) { - trackball(position); + trackball( position ); } else { - orbit(glm::vec2(delta[0], -delta[1]), true); + orbit( glm::vec2( delta[0], -delta[1] ), true ); } break; } @@ -276,43 +272,43 @@ namespace vk m_mousePosition = position; } - void CameraManipulator::orbit(glm::vec2 const& delta, bool invert) + void CameraManipulator::orbit( glm::vec2 const & delta, bool invert ) { - if(isZero(delta[0]) && isZero(delta[1])) + if ( isZero( delta[0] ) && isZero( delta[1] ) ) { return; } // Full width will do a full turn - float dx = delta[0] * float(glm::two_pi()); - float dy = delta[1] * float(glm::two_pi()); + float dx = delta[0] * float( glm::two_pi() ); + float dy = delta[1] * float( glm::two_pi() ); // Get the camera - glm::vec3 origin(invert ? m_cameraPosition : m_centerPosition); - glm::vec3 position(invert ? m_centerPosition : m_cameraPosition); + glm::vec3 origin( invert ? m_cameraPosition : m_centerPosition ); + glm::vec3 position( invert ? 
m_centerPosition : m_cameraPosition ); // Get the length of sight - glm::vec3 centerToEye(position - origin); - float radius = glm::length(centerToEye); - centerToEye = glm::normalize(centerToEye); + glm::vec3 centerToEye( position - origin ); + float radius = glm::length( centerToEye ); + centerToEye = glm::normalize( centerToEye ); // Find the rotation around the UP axis (Y) - glm::vec3 zAxis(centerToEye); - glm::mat4 yRotation = glm::rotate(-dx, m_upVector); + glm::vec3 zAxis( centerToEye ); + glm::mat4 yRotation = glm::rotate( -dx, m_upVector ); // Apply the (Y) rotation to the eye-center vector - glm::vec4 tmpVector = yRotation * glm::vec4(centerToEye.x, centerToEye.y, centerToEye.z, 0.0f); - centerToEye = glm::vec3(tmpVector.x, tmpVector.y, tmpVector.z); + glm::vec4 tmpVector = yRotation * glm::vec4( centerToEye.x, centerToEye.y, centerToEye.z, 0.0f ); + centerToEye = glm::vec3( tmpVector.x, tmpVector.y, tmpVector.z ); // Find the rotation around the X vector: cross between eye-center and up (X) - glm::vec3 xAxis = glm::cross(m_upVector, zAxis); - xAxis = glm::normalize(xAxis); - glm::mat4 xRotation = glm::rotate(-dy, xAxis); + glm::vec3 xAxis = glm::cross( m_upVector, zAxis ); + xAxis = glm::normalize( xAxis ); + glm::mat4 xRotation = glm::rotate( -dy, xAxis ); // Apply the (X) rotation to the eye-center vector - tmpVector = xRotation * glm::vec4(centerToEye.x, centerToEye.y, centerToEye.z, 0); - glm::vec3 rotatedVector(tmpVector.x, tmpVector.y, tmpVector.z); - if(sign(rotatedVector.x) == sign(centerToEye.x)) + tmpVector = xRotation * glm::vec4( centerToEye.x, centerToEye.y, centerToEye.z, 0 ); + glm::vec3 rotatedVector( tmpVector.x, tmpVector.y, tmpVector.z ); + if ( sign( rotatedVector.x ) == sign( centerToEye.x ) ) { centerToEye = rotatedVector; } @@ -323,7 +319,7 @@ namespace vk // Finding the new position glm::vec3 newPosition = centerToEye + origin; - if(!invert) + if ( !invert ) { m_cameraPosition = newPosition; // Normal: change the position of the camera } @@ -333,17 +329,17 @@ namespace vk } } - void CameraManipulator::pan(glm::vec2 const& delta) + void CameraManipulator::pan( glm::vec2 const & delta ) { - glm::vec3 z(m_cameraPosition - m_centerPosition); - float length = static_cast(glm::length(z)) / 0.785f; // 45 degrees - z = glm::normalize(z); - glm::vec3 x = glm::normalize(glm::cross(m_upVector, z)); - glm::vec3 y = glm::normalize(glm::cross(z, x)); + glm::vec3 z( m_cameraPosition - m_centerPosition ); + float length = static_cast( glm::length( z ) ) / 0.785f; // 45 degrees + z = glm::normalize( z ); + glm::vec3 x = glm::normalize( glm::cross( m_upVector, z ) ); + glm::vec3 y = glm::normalize( glm::cross( z, x ) ); x *= -delta[0] * length; y *= delta[1] * length; - if(m_mode == Mode::Fly) + if ( m_mode == Mode::Fly ) { x = -x; y = -y; @@ -353,14 +349,14 @@ namespace vk m_centerPosition += x + y; } - double CameraManipulator::projectOntoTBSphere(const glm::vec2& p) + double CameraManipulator::projectOntoTBSphere( const glm::vec2 & p ) { double z; - double d = length(p); - if(d < trackballSize * 0.70710678118654752440) + double d = length( p ); + if ( d < trackballSize * 0.70710678118654752440 ) { // inside sphere - z = sqrt(trackballSize * trackballSize - d * d); + z = sqrt( trackballSize * trackballSize - d * d ); } else { @@ -372,57 +368,58 @@ namespace vk return z; } - void CameraManipulator::trackball(glm::ivec2 const& position) + void CameraManipulator::trackball( glm::ivec2 const & position ) { - glm::vec2 p0(2 * (m_mousePosition[0] - m_windowSize[0] / 2) / 
double(m_windowSize[0]), - 2 * (m_windowSize[1] / 2 - m_mousePosition[1]) / double(m_windowSize[1])); - glm::vec2 p1(2 * (position[0] - m_windowSize[0] / 2) / double(m_windowSize[0]), 2 * (m_windowSize[1] / 2 - position[1]) / double(m_windowSize[1])); + glm::vec2 p0( 2 * ( m_mousePosition[0] - m_windowSize[0] / 2 ) / double( m_windowSize[0] ), + 2 * ( m_windowSize[1] / 2 - m_mousePosition[1] ) / double( m_windowSize[1] ) ); + glm::vec2 p1( 2 * ( position[0] - m_windowSize[0] / 2 ) / double( m_windowSize[0] ), + 2 * ( m_windowSize[1] / 2 - position[1] ) / double( m_windowSize[1] ) ); // determine the z coordinate on the sphere - glm::vec3 pTB0(p0[0], p0[1], projectOntoTBSphere(p0)); - glm::vec3 pTB1(p1[0], p1[1], projectOntoTBSphere(p1)); + glm::vec3 pTB0( p0[0], p0[1], projectOntoTBSphere( p0 ) ); + glm::vec3 pTB1( p1[0], p1[1], projectOntoTBSphere( p1 ) ); // calculate the rotation axis via cross product between p0 and p1 - glm::vec3 axis = glm::cross(pTB0, pTB1); - axis = glm::normalize(axis); + glm::vec3 axis = glm::cross( pTB0, pTB1 ); + axis = glm::normalize( axis ); // calculate the angle - float t = glm::length(pTB0 - pTB1) / (2.f * trackballSize); + float t = glm::length( pTB0 - pTB1 ) / ( 2.f * trackballSize ); // clamp between -1 and 1 - if(t > 1.0f) + if ( t > 1.0f ) { t = 1.0f; } - else if(t < -1.0f) + else if ( t < -1.0f ) { t = -1.0f; } - float rad = 2.0f * asin(t); + float rad = 2.0f * asin( t ); { - glm::vec4 rot_axis = m_matrix * glm::vec4(axis, 0); - glm::mat4 rot_mat = glm::rotate(rad, glm::vec3(rot_axis.x, rot_axis.y, rot_axis.z)); + glm::vec4 rot_axis = m_matrix * glm::vec4( axis, 0 ); + glm::mat4 rot_mat = glm::rotate( rad, glm::vec3( rot_axis.x, rot_axis.y, rot_axis.z ) ); - glm::vec3 pnt = m_cameraPosition - m_centerPosition; - glm::vec4 pnt2 = rot_mat * glm::vec4(pnt.x, pnt.y, pnt.z, 1); - m_cameraPosition = m_centerPosition + glm::vec3(pnt2.x, pnt2.y, pnt2.z); - glm::vec4 up2 = rot_mat * glm::vec4(m_upVector.x, m_upVector.y, m_upVector.z, 0); - m_upVector = glm::vec3(up2.x, up2.y, up2.z); + glm::vec3 pnt = m_cameraPosition - m_centerPosition; + glm::vec4 pnt2 = rot_mat * glm::vec4( pnt.x, pnt.y, pnt.z, 1 ); + m_cameraPosition = m_centerPosition + glm::vec3( pnt2.x, pnt2.y, pnt2.z ); + glm::vec4 up2 = rot_mat * glm::vec4( m_upVector.x, m_upVector.y, m_upVector.z, 0 ); + m_upVector = glm::vec3( up2.x, up2.y, up2.z ); } } void CameraManipulator::update() { - m_matrix = glm::lookAt(m_cameraPosition, m_centerPosition, m_upVector); + m_matrix = glm::lookAt( m_cameraPosition, m_centerPosition, m_upVector ); - if(!isZero(m_roll)) + if ( !isZero( m_roll ) ) { - glm::mat4 rot = glm::rotate(m_roll, glm::vec3(0, 0, 1)); + glm::mat4 rot = glm::rotate( m_roll, glm::vec3( 0, 0, 1 ) ); m_matrix = m_matrix * rot; } } - } // namespace su + } // namespace su } // namespace vk diff --git a/samples/RayTracing/RayTracing.cpp b/samples/RayTracing/RayTracing.cpp index 81fb78c..be51e3a 100644 --- a/samples/RayTracing/RayTracing.cpp +++ b/samples/RayTracing/RayTracing.cpp @@ -15,50 +15,55 @@ // VulkanHpp Samples : RayTracing // Simple sample how to ray trace using Vulkan +#include +#include #include #include #include -#include -#include - #define GLM_FORCE_DEPTH_ZERO_TO_ONE #define GLM_FORCE_RADIANS #define GLM_ENABLE_EXPERIMENTAL -#pragma warning(disable:4201) // disable warning C4201: nonstandard extension used: nameless struct/union; needed to get glm/detail/type_vec?.hpp without warnings +#pragma warning( disable : 4201 ) // disable warning C4201: nonstandard extension used: nameless 
struct/union; needed + // to get glm/detail/type_vec?.hpp without warnings +#include "../utils/shaders.hpp" +#include "../utils/utils.hpp" +#include "CameraManipulator.hpp" +#include "SPIRV/GlslangToSpv.h" + #include #include #include -#include "CameraManipulator.hpp" -#include "../utils/shaders.hpp" -#include "../utils/utils.hpp" -#include "SPIRV/GlslangToSpv.h" - -static char const* AppName = "RayTracing"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "RayTracing"; +static char const * EngineName = "Vulkan.hpp"; struct GeometryInstanceData { - GeometryInstanceData(glm::mat4x4 const& transform_, uint32_t instanceID_, uint8_t mask_, uint32_t instanceOffset_, uint8_t flags_, uint64_t accelerationStructureHandle_) - : instanceId(instanceID_) - , mask(mask_) - , instanceOffset(instanceOffset_) - , flags(flags_) - , accelerationStructureHandle(accelerationStructureHandle_) + GeometryInstanceData( glm::mat4x4 const & transform_, + uint32_t instanceID_, + uint8_t mask_, + uint32_t instanceOffset_, + uint8_t flags_, + uint64_t accelerationStructureHandle_ ) + : instanceId( instanceID_ ), + mask( mask_ ), + instanceOffset( instanceOffset_ ), + flags( flags_ ), + accelerationStructureHandle( accelerationStructureHandle_ ) { - assert(!(instanceID_ & 0xFF000000) && !(instanceOffset_ & 0xFF000000)); - memcpy(transform, &transform_, 12 * sizeof(float)); + assert( !( instanceID_ & 0xFF000000 ) && !( instanceOffset_ & 0xFF000000 ) ); + memcpy( transform, &transform_, 12 * sizeof( float ) ); } - float transform[12]; // Transform matrix, containing only the top 3 rows - uint32_t instanceId : 24; // Instance index - uint32_t mask : 8; // Visibility mask - uint32_t instanceOffset : 24; // Index of the hit group which will be invoked when a ray hits the instance - uint32_t flags : 8; // Instance flags, such as culling - uint64_t accelerationStructureHandle; // Opaque handle of the bottom-level acceleration structure + float transform[12]; // Transform matrix, containing only the top 3 rows + uint32_t instanceId : 24; // Instance index + uint32_t mask : 8; // Visibility mask + uint32_t instanceOffset : 24; // Index of the hit group which will be invoked when a ray hits the instance + uint32_t flags : 8; // Instance flags, such as culling + uint64_t accelerationStructureHandle; // Opaque handle of the bottom-level acceleration structure }; -static_assert(sizeof(GeometryInstanceData) == 64, "GeometryInstanceData structure compiles to incorrect size"); +static_assert( sizeof( GeometryInstanceData ) == 64, "GeometryInstanceData structure compiles to incorrect size" ); struct AccelerationStructureData { @@ -68,69 +73,112 @@ struct AccelerationStructureData std::unique_ptr instanceBufferData; }; -AccelerationStructureData createAccelerationStructureData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::UniqueCommandBuffer const& commandBuffer, - std::vector> const& instances, std::vector const& geometries) +AccelerationStructureData + createAccelerationStructureData( vk::PhysicalDevice const & physicalDevice, + vk::UniqueDevice const & device, + vk::UniqueCommandBuffer const & commandBuffer, + std::vector> const & instances, + std::vector const & geometries ) { - assert(instances.empty() ^ geometries.empty()); + assert( instances.empty() ^ geometries.empty() ); AccelerationStructureData accelerationStructureData; - vk::AccelerationStructureTypeNV accelerationStructureType = instances.empty() ? 
vk::AccelerationStructureTypeNV::eBottomLevel : vk::AccelerationStructureTypeNV::eTopLevel; - vk::AccelerationStructureInfoNV accelerationStructureInfo(accelerationStructureType, {}, vk::su::checked_cast(instances.size()), - vk::su::checked_cast(geometries.size()), geometries.data()); - accelerationStructureData.acclerationStructure = device->createAccelerationStructureNVUnique(vk::AccelerationStructureCreateInfoNV(0, accelerationStructureInfo)); + vk::AccelerationStructureTypeNV accelerationStructureType = + instances.empty() ? vk::AccelerationStructureTypeNV::eBottomLevel : vk::AccelerationStructureTypeNV::eTopLevel; + vk::AccelerationStructureInfoNV accelerationStructureInfo( accelerationStructureType, + {}, + vk::su::checked_cast( instances.size() ), + vk::su::checked_cast( geometries.size() ), + geometries.data() ); + accelerationStructureData.acclerationStructure = device->createAccelerationStructureNVUnique( + vk::AccelerationStructureCreateInfoNV( 0, accelerationStructureInfo ) ); - vk::AccelerationStructureMemoryRequirementsInfoNV objectRequirements(vk::AccelerationStructureMemoryRequirementsTypeNV::eObject, *accelerationStructureData.acclerationStructure); - vk::DeviceSize resultSizeInBytes = device->getAccelerationStructureMemoryRequirementsNV(objectRequirements).memoryRequirements.size; - assert(0 < resultSizeInBytes); - accelerationStructureData.resultBufferData = std::unique_ptr(new vk::su::BufferData(physicalDevice, device, resultSizeInBytes, vk::BufferUsageFlagBits::eRayTracingNV, - vk::MemoryPropertyFlagBits::eDeviceLocal)); + vk::AccelerationStructureMemoryRequirementsInfoNV objectRequirements( + vk::AccelerationStructureMemoryRequirementsTypeNV::eObject, *accelerationStructureData.acclerationStructure ); + vk::DeviceSize resultSizeInBytes = + device->getAccelerationStructureMemoryRequirementsNV( objectRequirements ).memoryRequirements.size; + assert( 0 < resultSizeInBytes ); + accelerationStructureData.resultBufferData = + std::unique_ptr( new vk::su::BufferData( physicalDevice, + device, + resultSizeInBytes, + vk::BufferUsageFlagBits::eRayTracingNV, + vk::MemoryPropertyFlagBits::eDeviceLocal ) ); - vk::AccelerationStructureMemoryRequirementsInfoNV buildScratchRequirements(vk::AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch, - *accelerationStructureData.acclerationStructure); - vk::AccelerationStructureMemoryRequirementsInfoNV updateScratchRequirements(vk::AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch, - *accelerationStructureData.acclerationStructure); - vk::DeviceSize scratchSizeInBytes = - std::max(device->getAccelerationStructureMemoryRequirementsNV(buildScratchRequirements).memoryRequirements.size, - device->getAccelerationStructureMemoryRequirementsNV(updateScratchRequirements).memoryRequirements.size); - assert(0 < scratchSizeInBytes); + vk::AccelerationStructureMemoryRequirementsInfoNV buildScratchRequirements( + vk::AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch, *accelerationStructureData.acclerationStructure ); + vk::AccelerationStructureMemoryRequirementsInfoNV updateScratchRequirements( + vk::AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch, + *accelerationStructureData.acclerationStructure ); + vk::DeviceSize scratchSizeInBytes = std::max( + device->getAccelerationStructureMemoryRequirementsNV( buildScratchRequirements ).memoryRequirements.size, + device->getAccelerationStructureMemoryRequirementsNV( updateScratchRequirements ).memoryRequirements.size ); + assert( 0 < scratchSizeInBytes ); - 
accelerationStructureData.scratchBufferData = std::unique_ptr(new vk::su::BufferData(physicalDevice, device, scratchSizeInBytes, vk::BufferUsageFlagBits::eRayTracingNV, - vk::MemoryPropertyFlagBits::eDeviceLocal)); + accelerationStructureData.scratchBufferData = + std::unique_ptr( new vk::su::BufferData( physicalDevice, + device, + scratchSizeInBytes, + vk::BufferUsageFlagBits::eRayTracingNV, + vk::MemoryPropertyFlagBits::eDeviceLocal ) ); - if (!instances.empty()) + if ( !instances.empty() ) { - accelerationStructureData.instanceBufferData = std::unique_ptr(new vk::su::BufferData(physicalDevice, device, instances.size() * sizeof(GeometryInstanceData), - vk::BufferUsageFlagBits::eRayTracingNV)); + accelerationStructureData.instanceBufferData = + std::unique_ptr( new vk::su::BufferData( physicalDevice, + device, + instances.size() * sizeof( GeometryInstanceData ), + vk::BufferUsageFlagBits::eRayTracingNV ) ); std::vector geometryInstanceData; - for(size_t i = 0; i < instances.size(); i++) + for ( size_t i = 0; i < instances.size(); i++ ) { uint64_t accelerationStructureHandle = 0; - device->getAccelerationStructureHandleNV(instances[i].first, sizeof(uint64_t), &accelerationStructureHandle); + device->getAccelerationStructureHandleNV( instances[i].first, sizeof( uint64_t ), &accelerationStructureHandle ); // For each instance we set its instance index to its index i in the instance vector, and set // its hit group index to 2*i. The hit group index defines which entry of the shader binding // table will contain the hit group to be executed when hitting this instance. We set this // index to 2*i due to the use of 2 types of rays in the scene: the camera rays and the shadow // rays. For each instance, the SBT will then have 2 hit groups - geometryInstanceData.push_back(GeometryInstanceData(glm::transpose(instances[i].second), static_cast(i), 0xFF, static_cast(2 * i), - static_cast(vk::GeometryInstanceFlagBitsNV::eTriangleCullDisable), accelerationStructureHandle)); + geometryInstanceData.push_back( + GeometryInstanceData( glm::transpose( instances[i].second ), + static_cast( i ), + 0xFF, + static_cast( 2 * i ), + static_cast( vk::GeometryInstanceFlagBitsNV::eTriangleCullDisable ), + accelerationStructureHandle ) ); } - accelerationStructureData.instanceBufferData->upload(device, geometryInstanceData); + accelerationStructureData.instanceBufferData->upload( device, geometryInstanceData ); } - device->bindAccelerationStructureMemoryNV(vk::BindAccelerationStructureMemoryInfoNV(*accelerationStructureData.acclerationStructure, - *accelerationStructureData.resultBufferData->deviceMemory)); + device->bindAccelerationStructureMemoryNV( vk::BindAccelerationStructureMemoryInfoNV( + *accelerationStructureData.acclerationStructure, *accelerationStructureData.resultBufferData->deviceMemory ) ); - commandBuffer->buildAccelerationStructureNV(vk::AccelerationStructureInfoNV(accelerationStructureType, {}, vk::su::checked_cast(instances.size()), - vk::su::checked_cast(geometries.size()), geometries.data()), - accelerationStructureData.instanceBufferData ? 
*accelerationStructureData.instanceBufferData->buffer : nullptr, 0, false, - *accelerationStructureData.acclerationStructure, nullptr, *accelerationStructureData.scratchBufferData->buffer, 0); + commandBuffer->buildAccelerationStructureNV( + vk::AccelerationStructureInfoNV( accelerationStructureType, + {}, + vk::su::checked_cast( instances.size() ), + vk::su::checked_cast( geometries.size() ), + geometries.data() ), + accelerationStructureData.instanceBufferData ? *accelerationStructureData.instanceBufferData->buffer : nullptr, + 0, + false, + *accelerationStructureData.acclerationStructure, + nullptr, + *accelerationStructureData.scratchBufferData->buffer, + 0 ); - commandBuffer->pipelineBarrier(vk::PipelineStageFlagBits::eAccelerationStructureBuildNV, vk::PipelineStageFlagBits::eAccelerationStructureBuildNV, {}, - vk::MemoryBarrier(vk::AccessFlagBits::eAccelerationStructureWriteNV | vk::AccessFlagBits::eAccelerationStructureReadNV, - vk::AccessFlagBits::eAccelerationStructureWriteNV | vk::AccessFlagBits::eAccelerationStructureReadNV), {}, {}); + commandBuffer->pipelineBarrier( vk::PipelineStageFlagBits::eAccelerationStructureBuildNV, + vk::PipelineStageFlagBits::eAccelerationStructureBuildNV, + {}, + vk::MemoryBarrier( vk::AccessFlagBits::eAccelerationStructureWriteNV | + vk::AccessFlagBits::eAccelerationStructureReadNV, + vk::AccessFlagBits::eAccelerationStructureWriteNV | + vk::AccessFlagBits::eAccelerationStructureReadNV ), + {}, + {} ); return accelerationStructureData; } @@ -156,18 +204,15 @@ struct UniformBufferObject struct Material { - glm::vec3 diffuse = glm::vec3(0.7f, 0.7f, 0.7f); - int textureID = -1; + glm::vec3 diffuse = glm::vec3( 0.7f, 0.7f, 0.7f ); + int textureID = -1; }; -const size_t MaterialStride = ((sizeof(Material) + 15) / 16) * 16; +const size_t MaterialStride = ( ( sizeof( Material ) + 15 ) / 16 ) * 16; struct Vertex { - Vertex(glm::vec3 const& p, glm::vec3 const& n, glm::vec2 const& tc, int m = 0) - : pos(p) - , nrm(n) - , texCoord(tc) - , matID(m) + Vertex( glm::vec3 const & p, glm::vec3 const & n, glm::vec2 const & tc, int m = 0 ) + : pos( p ), nrm( n ), texCoord( tc ), matID( m ) {} glm::vec3 pos; @@ -175,53 +220,52 @@ struct Vertex glm::vec2 texCoord; int matID; }; -const size_t VertexStride = ((sizeof(Vertex) + 15) / 16) * 16; +const size_t VertexStride = ( ( sizeof( Vertex ) + 15 ) / 16 ) * 16; -static const std::vector cubeData = -{ +static const std::vector cubeData = { // pos nrm texcoord matID // front face - { Vertex(glm::vec3(-1.0f, -1.0f, 1.0f), glm::vec3( 0.0f, 0.0f, 1.0f), glm::vec2(0.0f, 0.0f), 0) }, - { Vertex(glm::vec3( 1.0f, -1.0f, 1.0f), glm::vec3( 0.0f, 0.0f, 1.0f), glm::vec2(1.0f, 0.0f), 0) }, - { Vertex(glm::vec3( 1.0f, 1.0f, 1.0f), glm::vec3( 0.0f, 0.0f, 1.0f), glm::vec2(1.0f, 1.0f), 0) }, - { Vertex(glm::vec3( 1.0f, 1.0f, 1.0f), glm::vec3( 0.0f, 0.0f, 1.0f), glm::vec2(1.0f, 1.0f), 0) }, - { Vertex(glm::vec3(-1.0f, 1.0f, 1.0f), glm::vec3( 0.0f, 0.0f, 1.0f), glm::vec2(0.0f, 1.0f), 0) }, - { Vertex(glm::vec3(-1.0f, -1.0f, 1.0f), glm::vec3( 0.0f, 0.0f, 1.0f), glm::vec2(0.0f, 0.0f), 0) }, + { Vertex( glm::vec3( -1.0f, -1.0f, 1.0f ), glm::vec3( 0.0f, 0.0f, 1.0f ), glm::vec2( 0.0f, 0.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, -1.0f, 1.0f ), glm::vec3( 0.0f, 0.0f, 1.0f ), glm::vec2( 1.0f, 0.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, 1.0f, 1.0f ), glm::vec3( 0.0f, 0.0f, 1.0f ), glm::vec2( 1.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, 1.0f, 1.0f ), glm::vec3( 0.0f, 0.0f, 1.0f ), glm::vec2( 1.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, 
1.0f, 1.0f ), glm::vec3( 0.0f, 0.0f, 1.0f ), glm::vec2( 0.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, -1.0f, 1.0f ), glm::vec3( 0.0f, 0.0f, 1.0f ), glm::vec2( 0.0f, 0.0f ), 0 ) }, // back face - { Vertex(glm::vec3( 1.0f, -1.0f, -1.0f), glm::vec3( 0.0f, 0.0f, -1.0f), glm::vec2(0.0f, 0.0f), 0) }, - { Vertex(glm::vec3(-1.0f, -1.0f, -1.0f), glm::vec3( 0.0f, 0.0f, -1.0f), glm::vec2(1.0f, 0.0f), 0) }, - { Vertex(glm::vec3(-1.0f, 1.0f, -1.0f), glm::vec3( 0.0f, 0.0f, -1.0f), glm::vec2(1.0f, 1.0f), 0) }, - { Vertex(glm::vec3(-1.0f, 1.0f, -1.0f), glm::vec3( 0.0f, 0.0f, -1.0f), glm::vec2(1.0f, 1.0f), 0) }, - { Vertex(glm::vec3( 1.0f, 1.0f, -1.0f), glm::vec3( 0.0f, 0.0f, -1.0f), glm::vec2(0.0f, 1.0f), 0) }, - { Vertex(glm::vec3( 1.0f, -1.0f, -1.0f), glm::vec3( 0.0f, 0.0f, -1.0f), glm::vec2(0.0f, 0.0f), 0) }, + { Vertex( glm::vec3( 1.0f, -1.0f, -1.0f ), glm::vec3( 0.0f, 0.0f, -1.0f ), glm::vec2( 0.0f, 0.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, -1.0f, -1.0f ), glm::vec3( 0.0f, 0.0f, -1.0f ), glm::vec2( 1.0f, 0.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, 1.0f, -1.0f ), glm::vec3( 0.0f, 0.0f, -1.0f ), glm::vec2( 1.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, 1.0f, -1.0f ), glm::vec3( 0.0f, 0.0f, -1.0f ), glm::vec2( 1.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, 1.0f, -1.0f ), glm::vec3( 0.0f, 0.0f, -1.0f ), glm::vec2( 0.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, -1.0f, -1.0f ), glm::vec3( 0.0f, 0.0f, -1.0f ), glm::vec2( 0.0f, 0.0f ), 0 ) }, // left face - { Vertex(glm::vec3(-1.0f, -1.0f, -1.0f), glm::vec3(-1.0f, 0.0f, 0.0f), glm::vec2(0.0f, 0.0f), 0) }, - { Vertex(glm::vec3(-1.0f, -1.0f, 1.0f), glm::vec3(-1.0f, 0.0f, 0.0f), glm::vec2(1.0f, 0.0f), 0) }, - { Vertex(glm::vec3(-1.0f, 1.0f, 1.0f), glm::vec3(-1.0f, 0.0f, 0.0f), glm::vec2(1.0f, 1.0f), 0) }, - { Vertex(glm::vec3(-1.0f, 1.0f, 1.0f), glm::vec3(-1.0f, 0.0f, 0.0f), glm::vec2(1.0f, 1.0f), 0) }, - { Vertex(glm::vec3(-1.0f, 1.0f, -1.0f), glm::vec3(-1.0f, 0.0f, 0.0f), glm::vec2(0.0f, 1.0f), 0) }, - { Vertex(glm::vec3(-1.0f, -1.0f, -1.0f), glm::vec3(-1.0f, 0.0f, 0.0f), glm::vec2(0.0f, 0.0f), 0) }, + { Vertex( glm::vec3( -1.0f, -1.0f, -1.0f ), glm::vec3( -1.0f, 0.0f, 0.0f ), glm::vec2( 0.0f, 0.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, -1.0f, 1.0f ), glm::vec3( -1.0f, 0.0f, 0.0f ), glm::vec2( 1.0f, 0.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, 1.0f, 1.0f ), glm::vec3( -1.0f, 0.0f, 0.0f ), glm::vec2( 1.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, 1.0f, 1.0f ), glm::vec3( -1.0f, 0.0f, 0.0f ), glm::vec2( 1.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, 1.0f, -1.0f ), glm::vec3( -1.0f, 0.0f, 0.0f ), glm::vec2( 0.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, -1.0f, -1.0f ), glm::vec3( -1.0f, 0.0f, 0.0f ), glm::vec2( 0.0f, 0.0f ), 0 ) }, // right face - { Vertex(glm::vec3( 1.0f, -1.0f, 1.0f), glm::vec3( 1.0f, 0.0f, 0.0f), glm::vec2(0.0f, 0.0f), 0) }, - { Vertex(glm::vec3( 1.0f, -1.0f, -1.0f), glm::vec3( 1.0f, 0.0f, 0.0f), glm::vec2(1.0f, 0.0f), 0) }, - { Vertex(glm::vec3( 1.0f, 1.0f, -1.0f), glm::vec3( 1.0f, 0.0f, 0.0f), glm::vec2(1.0f, 1.0f), 0) }, - { Vertex(glm::vec3( 1.0f, 1.0f, -1.0f), glm::vec3( 1.0f, 0.0f, 0.0f), glm::vec2(1.0f, 1.0f), 0) }, - { Vertex(glm::vec3( 1.0f, 1.0f, 1.0f), glm::vec3( 1.0f, 0.0f, 0.0f), glm::vec2(0.0f, 1.0f), 0) }, - { Vertex(glm::vec3( 1.0f, -1.0f, 1.0f), glm::vec3( 1.0f, 0.0f, 0.0f), glm::vec2(0.0f, 0.0f), 0) }, + { Vertex( glm::vec3( 1.0f, -1.0f, 1.0f ), glm::vec3( 1.0f, 0.0f, 0.0f ), glm::vec2( 0.0f, 0.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, -1.0f, -1.0f ), glm::vec3( 1.0f, 0.0f, 0.0f ), glm::vec2( 
1.0f, 0.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, 1.0f, -1.0f ), glm::vec3( 1.0f, 0.0f, 0.0f ), glm::vec2( 1.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, 1.0f, -1.0f ), glm::vec3( 1.0f, 0.0f, 0.0f ), glm::vec2( 1.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, 1.0f, 1.0f ), glm::vec3( 1.0f, 0.0f, 0.0f ), glm::vec2( 0.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, -1.0f, 1.0f ), glm::vec3( 1.0f, 0.0f, 0.0f ), glm::vec2( 0.0f, 0.0f ), 0 ) }, // top face - { Vertex(glm::vec3(-1.0f, 1.0f, 1.0f), glm::vec3( 0.0f, 1.0f, 0.0f), glm::vec2(0.0f, 0.0f), 0) }, - { Vertex(glm::vec3( 1.0f, 1.0f, 1.0f), glm::vec3( 0.0f, 1.0f, 0.0f), glm::vec2(1.0f, 0.0f), 0) }, - { Vertex(glm::vec3( 1.0f, 1.0f, -1.0f), glm::vec3( 0.0f, 1.0f, 0.0f), glm::vec2(1.0f, 1.0f), 0) }, - { Vertex(glm::vec3( 1.0f, 1.0f, -1.0f), glm::vec3( 0.0f, 1.0f, 0.0f), glm::vec2(1.0f, 1.0f), 0) }, - { Vertex(glm::vec3(-1.0f, 1.0f, -1.0f), glm::vec3( 0.0f, 1.0f, 0.0f), glm::vec2(0.0f, 1.0f), 0) }, - { Vertex(glm::vec3(-1.0f, 1.0f, 1.0f), glm::vec3( 0.0f, 1.0f, 0.0f), glm::vec2(0.0f, 0.0f), 0) }, + { Vertex( glm::vec3( -1.0f, 1.0f, 1.0f ), glm::vec3( 0.0f, 1.0f, 0.0f ), glm::vec2( 0.0f, 0.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, 1.0f, 1.0f ), glm::vec3( 0.0f, 1.0f, 0.0f ), glm::vec2( 1.0f, 0.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, 1.0f, -1.0f ), glm::vec3( 0.0f, 1.0f, 0.0f ), glm::vec2( 1.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, 1.0f, -1.0f ), glm::vec3( 0.0f, 1.0f, 0.0f ), glm::vec2( 1.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, 1.0f, -1.0f ), glm::vec3( 0.0f, 1.0f, 0.0f ), glm::vec2( 0.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, 1.0f, 1.0f ), glm::vec3( 0.0f, 1.0f, 0.0f ), glm::vec2( 0.0f, 0.0f ), 0 ) }, // bottom face - { Vertex(glm::vec3(-1.0f, -1.0f, -1.0f), glm::vec3( 0.0f, -1.0f, 0.0f), glm::vec2(0.0f, 0.0f), 0) }, - { Vertex(glm::vec3( 1.0f, -1.0f, -1.0f), glm::vec3( 0.0f, -1.0f, 0.0f), glm::vec2(1.0f, 0.0f), 0) }, - { Vertex(glm::vec3( 1.0f, -1.0f, 1.0f), glm::vec3( 0.0f, -1.0f, 0.0f), glm::vec2(1.0f, 1.0f), 0) }, - { Vertex(glm::vec3( 1.0f, -1.0f, 1.0f), glm::vec3( 0.0f, -1.0f, 0.0f), glm::vec2(1.0f, 1.0f), 0) }, - { Vertex(glm::vec3(-1.0f, -1.0f, 1.0f), glm::vec3( 0.0f, -1.0f, 0.0f), glm::vec2(0.0f, 1.0f), 0) }, - { Vertex(glm::vec3(-1.0f, -1.0f, -1.0f), glm::vec3( 0.0f, -1.0f, 0.0f), glm::vec2(0.0f, 0.0f), 0) }, + { Vertex( glm::vec3( -1.0f, -1.0f, -1.0f ), glm::vec3( 0.0f, -1.0f, 0.0f ), glm::vec2( 0.0f, 0.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, -1.0f, -1.0f ), glm::vec3( 0.0f, -1.0f, 0.0f ), glm::vec2( 1.0f, 0.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, -1.0f, 1.0f ), glm::vec3( 0.0f, -1.0f, 0.0f ), glm::vec2( 1.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( 1.0f, -1.0f, 1.0f ), glm::vec3( 0.0f, -1.0f, 0.0f ), glm::vec2( 1.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, -1.0f, 1.0f ), glm::vec3( 0.0f, -1.0f, 0.0f ), glm::vec2( 0.0f, 1.0f ), 0 ) }, + { Vertex( glm::vec3( -1.0f, -1.0f, -1.0f ), glm::vec3( 0.0f, -1.0f, 0.0f ), glm::vec2( 0.0f, 0.0f ), 0 ) }, }; static std::string vertexShaderText = R"( @@ -476,81 +520,83 @@ void main() )"; #ifndef IMGUI_VK_QUEUED_FRAMES -#define IMGUI_VK_QUEUED_FRAMES 2 +# define IMGUI_VK_QUEUED_FRAMES 2 #endif // !IMGUI_VK_QUEUED_FRAMES struct AppInfo { vk::su::CameraManipulator cameraManipulator; - bool useRasterRender = false; + bool useRasterRender = false; }; -static void check_vk_result(VkResult err) +static void check_vk_result( VkResult err ) { - if (err != 0) + if ( err != 0 ) { - std::cerr << AppName << ": Vulkan error " << vk::to_string(static_cast(err)); - if (err < 
0) + std::cerr << AppName << ": Vulkan error " << vk::to_string( static_cast( err ) ); + if ( err < 0 ) { abort(); } } } -static void cursorPosCallback(GLFWwindow* window, double mouseX, double mouseY) +static void cursorPosCallback( GLFWwindow * window, double mouseX, double mouseY ) { - vk::su::CameraManipulator::MouseButton mouseButton = (glfwGetMouseButton(window, GLFW_MOUSE_BUTTON_LEFT) == GLFW_PRESS) - ? vk::su::CameraManipulator::MouseButton::Left - : (glfwGetMouseButton(window, GLFW_MOUSE_BUTTON_MIDDLE) == GLFW_PRESS) - ? vk::su::CameraManipulator::MouseButton::Middle - : (glfwGetMouseButton(window, GLFW_MOUSE_BUTTON_RIGHT) == GLFW_PRESS) - ? vk::su::CameraManipulator::MouseButton::Right - : vk::su::CameraManipulator::MouseButton::None; - if (mouseButton != vk::su::CameraManipulator::MouseButton::None) + vk::su::CameraManipulator::MouseButton mouseButton = + ( glfwGetMouseButton( window, GLFW_MOUSE_BUTTON_LEFT ) == GLFW_PRESS ) + ? vk::su::CameraManipulator::MouseButton::Left + : ( glfwGetMouseButton( window, GLFW_MOUSE_BUTTON_MIDDLE ) == GLFW_PRESS ) + ? vk::su::CameraManipulator::MouseButton::Middle + : ( glfwGetMouseButton( window, GLFW_MOUSE_BUTTON_RIGHT ) == GLFW_PRESS ) + ? vk::su::CameraManipulator::MouseButton::Right + : vk::su::CameraManipulator::MouseButton::None; + if ( mouseButton != vk::su::CameraManipulator::MouseButton::None ) { vk::su::CameraManipulator::ModifierFlags modifiers; - if (glfwGetKey(window, GLFW_KEY_LEFT_ALT) == GLFW_PRESS) + if ( glfwGetKey( window, GLFW_KEY_LEFT_ALT ) == GLFW_PRESS ) { modifiers |= vk::su::CameraManipulator::ModifierFlagBits::Alt; } - if (glfwGetKey(window, GLFW_KEY_LEFT_CONTROL) == GLFW_PRESS) + if ( glfwGetKey( window, GLFW_KEY_LEFT_CONTROL ) == GLFW_PRESS ) { modifiers |= vk::su::CameraManipulator::ModifierFlagBits::Ctrl; } - if (glfwGetKey(window, GLFW_KEY_LEFT_SHIFT) == GLFW_PRESS) + if ( glfwGetKey( window, GLFW_KEY_LEFT_SHIFT ) == GLFW_PRESS ) { modifiers |= vk::su::CameraManipulator::ModifierFlagBits::Shift; } - vk::su::CameraManipulator & cameraManipulator = reinterpret_cast(glfwGetWindowUserPointer(window))->cameraManipulator; - cameraManipulator.mouseMove(glm::ivec2(static_cast(mouseX), static_cast(mouseY)), mouseButton, modifiers); + vk::su::CameraManipulator & cameraManipulator = + reinterpret_cast( glfwGetWindowUserPointer( window ) )->cameraManipulator; + cameraManipulator.mouseMove( + glm::ivec2( static_cast( mouseX ), static_cast( mouseY ) ), mouseButton, modifiers ); } } -static void errorCallback(int error, const char* description) +static void errorCallback( int error, const char * description ) { - fprintf(stderr, "GLFW Error %d: %s\n", error, description); + fprintf( stderr, "GLFW Error %d: %s\n", error, description ); } -static void framebufferSizeCallback(GLFWwindow* window, int w, int h) +static void framebufferSizeCallback( GLFWwindow * window, int w, int h ) { - vk::su::CameraManipulator & cameraManipulator = reinterpret_cast(glfwGetWindowUserPointer(window))->cameraManipulator; - cameraManipulator.setWindowSize(glm::ivec2(w, h)); + vk::su::CameraManipulator & cameraManipulator = + reinterpret_cast( glfwGetWindowUserPointer( window ) )->cameraManipulator; + cameraManipulator.setWindowSize( glm::ivec2( w, h ) ); } -static void keyCallback(GLFWwindow* window, int key, int /*scancode*/, int action, int /*mods*/) +static void keyCallback( GLFWwindow * window, int key, int /*scancode*/, int action, int /*mods*/ ) { - if (action == GLFW_PRESS) + if ( action == GLFW_PRESS ) { - switch (key) + switch ( key ) { case 
GLFW_KEY_ESCAPE: - case 'Q': - glfwSetWindowShouldClose(window, 1); - break; + case 'Q': glfwSetWindowShouldClose( window, 1 ); break; case 'R': { - AppInfo * appInfo = reinterpret_cast(glfwGetWindowUserPointer(window)); + AppInfo * appInfo = reinterpret_cast( glfwGetWindowUserPointer( window ) ); appInfo->useRasterRender = !appInfo->useRasterRender; } break; @@ -558,43 +604,46 @@ static void keyCallback(GLFWwindow* window, int key, int /*scancode*/, int actio } } -static void mouseButtonCallback(GLFWwindow* window, int /*button*/, int /*action*/, int /*mods*/) +static void mouseButtonCallback( GLFWwindow * window, int /*button*/, int /*action*/, int /*mods*/ ) { double xpos, ypos; - glfwGetCursorPos(window, &xpos, &ypos); + glfwGetCursorPos( window, &xpos, &ypos ); - vk::su::CameraManipulator & cameraManipulator = reinterpret_cast(glfwGetWindowUserPointer(window))->cameraManipulator; - cameraManipulator.setMousePosition(glm::ivec2(static_cast(xpos), static_cast(ypos))); + vk::su::CameraManipulator & cameraManipulator = + reinterpret_cast( glfwGetWindowUserPointer( window ) )->cameraManipulator; + cameraManipulator.setMousePosition( glm::ivec2( static_cast( xpos ), static_cast( ypos ) ) ); } -static void scrollCallback(GLFWwindow* window, double /*xoffset*/, double yoffset) +static void scrollCallback( GLFWwindow * window, double /*xoffset*/, double yoffset ) { - vk::su::CameraManipulator & cameraManipulator = reinterpret_cast(glfwGetWindowUserPointer(window))->cameraManipulator; - cameraManipulator.wheel(static_cast(yoffset)); + vk::su::CameraManipulator & cameraManipulator = + reinterpret_cast( glfwGetWindowUserPointer( window ) )->cameraManipulator; + cameraManipulator.wheel( static_cast( yoffset ) ); } - // random data and functions static std::random_device randomDevice; -static std::mt19937 randomGenerator(randomDevice()); +static std::mt19937 randomGenerator( randomDevice() ); template -T random(T minValue = std::numeric_limits::min(), T maxValue = std::numeric_limits::max()) +T random( T minValue = std::numeric_limits::min(), T maxValue = std::numeric_limits::max() ) { - static_assert(std::numeric_limits::is_integer, "Type T needs to be an integral type!\n"); - std::uniform_int_distribution<> randomDistribution(minValue, maxValue); + static_assert( std::numeric_limits::is_integer, "Type T needs to be an integral type!\n" ); + std::uniform_int_distribution<> randomDistribution( minValue, maxValue ); - return static_cast(randomDistribution(randomGenerator)); + return static_cast( randomDistribution( randomGenerator ) ); } -glm::vec3 randomVec3(float minValue, float maxValue) +glm::vec3 randomVec3( float minValue, float maxValue ) { - std::uniform_real_distribution randomDistribution(minValue, maxValue); + std::uniform_real_distribution randomDistribution( minValue, maxValue ); - return glm::vec3(randomDistribution(randomGenerator), randomDistribution(randomGenerator), randomDistribution(randomGenerator)); + return glm::vec3( randomDistribution( randomGenerator ), + randomDistribution( randomGenerator ), + randomDistribution( randomGenerator ) ); } -int main(int /*argc*/, char** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { // number of cubes in x-, y-, and z-direction const size_t xMax = 10; @@ -606,481 +655,690 @@ int main(int /*argc*/, char** /*argv*/) try { // Setup glfw - glfwSetErrorCallback(errorCallback); - if (!glfwInit()) + glfwSetErrorCallback( errorCallback ); + if ( !glfwInit() ) { std::cerr << AppName << ": can't initialize glfw!\n"; return 1; } - if 
(!glfwVulkanSupported()) + if ( !glfwVulkanSupported() ) { std::cerr << AppName << ": Vulkan not supported!\n"; return 1; } // create a window using glfw - glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API); - vk::Extent2D windowExtent(1280, 720); - GLFWwindow* window = glfwCreateWindow(windowExtent.width, windowExtent.height, AppName, nullptr, nullptr); + glfwWindowHint( GLFW_CLIENT_API, GLFW_NO_API ); + vk::Extent2D windowExtent( 1280, 720 ); + GLFWwindow * window = glfwCreateWindow( windowExtent.width, windowExtent.height, AppName, nullptr, nullptr ); // install some callbacks - glfwSetCursorPosCallback(window, cursorPosCallback); - glfwSetFramebufferSizeCallback(window, framebufferSizeCallback); - glfwSetKeyCallback(window, keyCallback); - glfwSetMouseButtonCallback(window, mouseButtonCallback); - glfwSetScrollCallback(window, scrollCallback); + glfwSetCursorPosCallback( window, cursorPosCallback ); + glfwSetFramebufferSizeCallback( window, framebufferSizeCallback ); + glfwSetKeyCallback( window, keyCallback ); + glfwSetMouseButtonCallback( window, mouseButtonCallback ); + glfwSetScrollCallback( window, scrollCallback ); // Setup camera and make it available as the userPointer in the glfw window - appInfo.cameraManipulator.setWindowSize(glm::u32vec2(windowExtent.width, windowExtent.height)); - glm::vec3 diagonal = 3.0f * glm::vec3(static_cast(xMax), static_cast(yMax), static_cast(zMax)); - appInfo.cameraManipulator.setLookat(1.5f * diagonal, 0.5f *diagonal, glm::vec3(0, 1, 0)); - glfwSetWindowUserPointer(window, &appInfo); + appInfo.cameraManipulator.setWindowSize( glm::u32vec2( windowExtent.width, windowExtent.height ) ); + glm::vec3 diagonal = + 3.0f * glm::vec3( static_cast( xMax ), static_cast( yMax ), static_cast( zMax ) ); + appInfo.cameraManipulator.setLookat( 1.5f * diagonal, 0.5f * diagonal, glm::vec3( 0, 1, 0 ) ); + glfwSetWindowUserPointer( window, &appInfo ); // Create Vulkan Instance with needed extensions - uint32_t glfwExtensionsCount; - const char** glfwExtensions = glfwGetRequiredInstanceExtensions(&glfwExtensionsCount); + uint32_t glfwExtensionsCount; + const char ** glfwExtensions = glfwGetRequiredInstanceExtensions( &glfwExtensionsCount ); std::vector instanceExtensions; - instanceExtensions.reserve(glfwExtensionsCount + 1); - for (uint32_t i = 0; i < glfwExtensionsCount; i++) + instanceExtensions.reserve( glfwExtensionsCount + 1 ); + for ( uint32_t i = 0; i < glfwExtensionsCount; i++ ) { - instanceExtensions.push_back(glfwExtensions[i]); + instanceExtensions.push_back( glfwExtensions[i] ); } - instanceExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME); + instanceExtensions.push_back( VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME ); - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, instanceExtensions); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, instanceExtensions ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); // Create Window Surface (using glfw) vk::SurfaceKHR surface; - VkResult err = glfwCreateWindowSurface(VkInstance(*instance), window, nullptr, reinterpret_cast(&surface)); - check_vk_result(err); + VkResult err = + glfwCreateWindowSurface( VkInstance( *instance ), window, 
nullptr, reinterpret_cast( &surface ) ); + check_vk_result( err ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, surface); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, surface ); // Create a Device with ray tracing support (besides some other extensions needed) and needed features - auto supportedFeatures = physicalDevice.getFeatures2(); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, - {VK_KHR_SWAPCHAIN_EXTENSION_NAME, VK_NV_RAY_TRACING_EXTENSION_NAME, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME}, - &supportedFeatures.get().features, &supportedFeatures.get()); + auto supportedFeatures = + physicalDevice.getFeatures2(); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, + graphicsAndPresentQueueFamilyIndex.first, + { VK_KHR_SWAPCHAIN_EXTENSION_NAME, + VK_NV_RAY_TRACING_EXTENSION_NAME, + VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME }, + &supportedFeatures.get().features, + &supportedFeatures.get() ); // setup stuff per frame std::array perFrameData; - for (int i = 0; i < IMGUI_VK_QUEUED_FRAMES; i++) + for ( int i = 0; i < IMGUI_VK_QUEUED_FRAMES; i++ ) { - perFrameData[i].commandPool = device->createCommandPoolUnique(vk::CommandPoolCreateInfo(vk::CommandPoolCreateFlagBits::eResetCommandBuffer, graphicsAndPresentQueueFamilyIndex.first)); - perFrameData[i].commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(*perFrameData[i].commandPool, vk::CommandBufferLevel::ePrimary, 1)).front()); - perFrameData[i].fence = device->createFenceUnique(vk::FenceCreateInfo(vk::FenceCreateFlagBits::eSignaled)); - perFrameData[i].presentCompleteSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); - perFrameData[i].renderCompleteSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); + perFrameData[i].commandPool = device->createCommandPoolUnique( vk::CommandPoolCreateInfo( + vk::CommandPoolCreateFlagBits::eResetCommandBuffer, graphicsAndPresentQueueFamilyIndex.first ) ); + perFrameData[i].commandBuffer = + std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + *perFrameData[i].commandPool, vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); + perFrameData[i].fence = device->createFenceUnique( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) ); + perFrameData[i].presentCompleteSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); + perFrameData[i].renderCompleteSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); } - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); // create a descriptor pool with a number of available descriptors - std::vector poolSizes = - { + std::vector poolSizes = { { vk::DescriptorType::eCombinedImageSampler, 1000 }, - { vk::DescriptorType::eUniformBuffer, 1000}, + { vk::DescriptorType::eUniformBuffer, 1000 }, { vk::DescriptorType::eStorageBuffer, 1000 }, }; - vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool(device, poolSizes); + vk::UniqueDescriptorPool descriptorPool = 
vk::su::createDescriptorPool( device, poolSizes ); // setup swap chain, render pass, depth buffer and the frame buffers - vk::su::SwapChainData swapChainData(physicalDevice, device, surface, windowExtent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eStorage, vk::UniqueSwapchainKHR(), - graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); - vk::SurfaceFormatKHR surfaceFormat = vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surface)); - vk::Format depthFormat = vk::su::pickDepthFormat(physicalDevice); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + surface, + windowExtent, + vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eStorage, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); + vk::SurfaceFormatKHR surfaceFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surface ) ); + vk::Format depthFormat = vk::su::pickDepthFormat( physicalDevice ); // setup a render pass - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, surfaceFormat.format, depthFormat); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( device, surfaceFormat.format, depthFormat ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, depthFormat, windowExtent); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, windowExtent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, depthFormat, windowExtent ); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, windowExtent ); bool samplerAnisotropy = supportedFeatures.get().features.samplerAnisotropy; // create some simple checkerboard textures, randomly sized and colored - const size_t textureCount = 10; + const size_t textureCount = 10; std::vector textures; - textures.reserve(textureCount); - for (size_t i = 0; i < textureCount; i++) + textures.reserve( textureCount ); + for ( size_t i = 0; i < textureCount; i++ ) { - textures.push_back(vk::su::TextureData(physicalDevice, device, {random(2, 8) * 16, random(2, 8) * 16}, - vk::ImageUsageFlagBits::eTransferDst | vk::ImageUsageFlagBits::eSampled, {}, samplerAnisotropy, true)); + textures.push_back( vk::su::TextureData( physicalDevice, + device, + { random( 2, 8 ) * 16, random( 2, 8 ) * 16 }, + vk::ImageUsageFlagBits::eTransferDst | vk::ImageUsageFlagBits::eSampled, + {}, + samplerAnisotropy, + true ) ); } - vk::su::oneTimeSubmit(device, perFrameData[0].commandPool, graphicsQueue, - [&](vk::UniqueCommandBuffer const& commandBuffer) - { - for (auto & t : textures) - { - t.setImage(device, commandBuffer, vk::su::CheckerboardImageGenerator({random(), random(), random()}, - {random(), random(), random()})); - } - }); + vk::su::oneTimeSubmit( + device, perFrameData[0].commandPool, graphicsQueue, [&]( vk::UniqueCommandBuffer const & commandBuffer ) { + for ( auto & t : textures ) + { + t.setImage( + device, + commandBuffer, + vk::su::CheckerboardImageGenerator( { random(), random(), random() }, + { random(), random(), random() } ) ); + } + } ); // create some materials with a random diffuse color, referencing one of the above textures const size_t materialCount = 10; - assert(materialCount == textureCount); - std::vector materials(materialCount); - for (size_t i = 0; i < materialCount; i++) + assert( materialCount == textureCount ); + 
std::vector materials( materialCount ); + for ( size_t i = 0; i < materialCount; i++ ) { - materials[i].diffuse = randomVec3(0.0f, 1.0f); - materials[i].textureID = vk::su::checked_cast(i); + materials[i].diffuse = randomVec3( 0.0f, 1.0f ); + materials[i].textureID = vk::su::checked_cast( i ); } - vk::su::BufferData materialBufferData(physicalDevice, device, materialCount * MaterialStride, vk::BufferUsageFlagBits::eStorageBuffer); - materialBufferData.upload(device, materials, MaterialStride); + vk::su::BufferData materialBufferData( + physicalDevice, device, materialCount * MaterialStride, vk::BufferUsageFlagBits::eStorageBuffer ); + materialBufferData.upload( device, materials, MaterialStride ); // create a a 3D-array of cubes, randomly jittered, using a random material std::vector vertices; - vertices.reserve(xMax * yMax * zMax * cubeData.size()); - for (size_t x = 0; x < xMax; x++) + vertices.reserve( xMax * yMax * zMax * cubeData.size() ); + for ( size_t x = 0; x < xMax; x++ ) { - for (size_t y = 0; y < yMax; y++) + for ( size_t y = 0; y < yMax; y++ ) { - for (size_t z = 0; z < zMax; z++) + for ( size_t z = 0; z < zMax; z++ ) { - int m = random(0, materialCount - 1); - glm::vec3 jitter = randomVec3(0.0f, 0.6f); - for (auto const& v : cubeData) + int m = random( 0, materialCount - 1 ); + glm::vec3 jitter = randomVec3( 0.0f, 0.6f ); + for ( auto const & v : cubeData ) { - vertices.push_back(v); - vertices.back().pos += 3.0f * glm::vec3(static_cast(x), static_cast(y), static_cast(z)) + jitter; - vertices.back().matID = static_cast(m); + vertices.push_back( v ); + vertices.back().pos += + 3.0f * glm::vec3( static_cast( x ), static_cast( y ), static_cast( z ) ) + jitter; + vertices.back().matID = static_cast( m ); } } } } // create an 1-1 index buffer - std::vector indices(vertices.size()); - std::iota(indices.begin(), indices.end(), 0); + std::vector indices( vertices.size() ); + std::iota( indices.begin(), indices.end(), 0 ); // there's just one vertex- and one index-buffer, but with more complex scene loaders there might be more! 
- vk::su::BufferData vertexBufferData(physicalDevice, device, vertices.size() * VertexStride, - vk::BufferUsageFlagBits::eTransferDst | vk::BufferUsageFlagBits::eVertexBuffer | vk::BufferUsageFlagBits::eStorageBuffer, - vk::MemoryPropertyFlagBits::eDeviceLocal); - vertexBufferData.upload(physicalDevice, device, perFrameData[0].commandPool, graphicsQueue, vertices, VertexStride); + vk::su::BufferData vertexBufferData( physicalDevice, + device, + vertices.size() * VertexStride, + vk::BufferUsageFlagBits::eTransferDst | + vk::BufferUsageFlagBits::eVertexBuffer | + vk::BufferUsageFlagBits::eStorageBuffer, + vk::MemoryPropertyFlagBits::eDeviceLocal ); + vertexBufferData.upload( + physicalDevice, device, perFrameData[0].commandPool, graphicsQueue, vertices, VertexStride ); - vk::su::BufferData indexBufferData(physicalDevice, device, indices.size() * sizeof(uint32_t), - vk::BufferUsageFlagBits::eTransferDst | vk::BufferUsageFlagBits::eIndexBuffer | vk::BufferUsageFlagBits::eStorageBuffer, - vk::MemoryPropertyFlagBits::eDeviceLocal); - indexBufferData.upload(physicalDevice, device, perFrameData[0].commandPool, graphicsQueue, indices, sizeof(uint32_t)); + vk::su::BufferData indexBufferData( physicalDevice, + device, + indices.size() * sizeof( uint32_t ), + vk::BufferUsageFlagBits::eTransferDst | vk::BufferUsageFlagBits::eIndexBuffer | + vk::BufferUsageFlagBits::eStorageBuffer, + vk::MemoryPropertyFlagBits::eDeviceLocal ); + indexBufferData.upload( + physicalDevice, device, perFrameData[0].commandPool, graphicsQueue, indices, sizeof( uint32_t ) ); - glm::mat4x4 transform(glm::mat4x4(1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f)); + glm::mat4x4 transform( + glm::mat4x4( 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f ) ); vk::UniqueDescriptorSetLayout descriptorSetLayout = - vk::su::createDescriptorSetLayout(device, - { - { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex }, - { vk::DescriptorType::eStorageBuffer, 1, vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment }, - { vk::DescriptorType::eCombinedImageSampler, static_cast(textures.size()), vk::ShaderStageFlagBits::eFragment } - }); - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo({}, 1, &(*descriptorSetLayout))); + vk::su::createDescriptorSetLayout( device, + { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex }, + { vk::DescriptorType::eStorageBuffer, + 1, + vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment }, + { vk::DescriptorType::eCombinedImageSampler, + static_cast( textures.size() ), + vk::ShaderStageFlagBits::eFragment } } ); + vk::UniquePipelineLayout pipelineLayout = + device->createPipelineLayoutUnique( vk::PipelineLayoutCreateInfo( {}, 1, &( *descriptorSetLayout ) ) ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText ); glslang::FinalizeProcess(); - vk::UniquePipeline 
graphicsPipeline = vk::su::createGraphicsPipeline(device, vk::UniquePipelineCache(), std::make_pair(*vertexShaderModule, nullptr), - std::make_pair(*fragmentShaderModule, nullptr), VertexStride, - { - { vk::Format::eR32G32B32Sfloat, vk::su::checked_cast(offsetof(Vertex, pos)) }, - { vk::Format::eR32G32B32Sfloat, vk::su::checked_cast(offsetof(Vertex, nrm)) }, - { vk::Format::eR32G32Sfloat, vk::su::checked_cast(offsetof(Vertex, texCoord)) }, - { vk::Format::eR32Sint, vk::su::checked_cast(offsetof(Vertex, matID)) } - }, vk::FrontFace::eCounterClockwise, true, pipelineLayout, renderPass); + vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline( + device, + vk::UniquePipelineCache(), + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + VertexStride, + { { vk::Format::eR32G32B32Sfloat, vk::su::checked_cast( offsetof( Vertex, pos ) ) }, + { vk::Format::eR32G32B32Sfloat, vk::su::checked_cast( offsetof( Vertex, nrm ) ) }, + { vk::Format::eR32G32Sfloat, vk::su::checked_cast( offsetof( Vertex, texCoord ) ) }, + { vk::Format::eR32Sint, vk::su::checked_cast( offsetof( Vertex, matID ) ) } }, + vk::FrontFace::eCounterClockwise, + true, + pipelineLayout, + renderPass ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(UniformBufferObject), vk::BufferUsageFlagBits::eUniformBuffer); + vk::su::BufferData uniformBufferData( + physicalDevice, device, sizeof( UniformBufferObject ), vk::BufferUsageFlagBits::eUniformBuffer ); - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front()); - vk::su::updateDescriptorSets(device, descriptorSet, { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView() }, - { vk::DescriptorType::eStorageBuffer, materialBufferData.buffer, vk::UniqueBufferView() } }, textures); + vk::UniqueDescriptorSet descriptorSet = std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); + vk::su::updateDescriptorSets( + device, + descriptorSet, + { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView() }, + { vk::DescriptorType::eStorageBuffer, materialBufferData.buffer, vk::UniqueBufferView() } }, + textures ); // RayTracing specific stuff // create acceleration structures: one top-level, and just one bottom-level AccelerationStructureData topLevelAS, bottomLevelAS; - vk::su::oneTimeSubmit(device, perFrameData[0].commandPool, graphicsQueue, - [&](vk::UniqueCommandBuffer const& commandBuffer) - { - vk::GeometryDataNV geometryDataNV(vk::GeometryTrianglesNV(*vertexBufferData.buffer, 0, vk::su::checked_cast(vertices.size()), VertexStride, - vk::Format::eR32G32B32Sfloat, *indexBufferData.buffer, 0, - vk::su::checked_cast(indices.size()), vk::IndexType::eUint32), {}); - bottomLevelAS = createAccelerationStructureData(physicalDevice, device, commandBuffer, {}, {vk::GeometryNV(vk::GeometryTypeNV::eTriangles, geometryDataNV)}); + vk::su::oneTimeSubmit( + device, perFrameData[0].commandPool, graphicsQueue, [&]( vk::UniqueCommandBuffer const & commandBuffer ) { + vk::GeometryDataNV geometryDataNV( vk::GeometryTrianglesNV( *vertexBufferData.buffer, + 0, + vk::su::checked_cast( vertices.size() ), + VertexStride, + vk::Format::eR32G32B32Sfloat, + *indexBufferData.buffer, + 0, + vk::su::checked_cast( indices.size() ), + vk::IndexType::eUint32 ), + {} ); + bottomLevelAS = + 
createAccelerationStructureData( physicalDevice, + device, + commandBuffer, + {}, + { vk::GeometryNV( vk::GeometryTypeNV::eTriangles, geometryDataNV ) } ); - topLevelAS = createAccelerationStructureData(physicalDevice, device, commandBuffer, {std::make_pair(*bottomLevelAS.acclerationStructure, transform)}, - std::vector()); - }); + topLevelAS = + createAccelerationStructureData( physicalDevice, + device, + commandBuffer, + { std::make_pair( *bottomLevelAS.acclerationStructure, transform ) }, + std::vector() ); + } ); // create raytracing descriptor set - vk::su::oneTimeSubmit(device, perFrameData[0].commandPool, graphicsQueue, - [&](vk::UniqueCommandBuffer const& commandBuffer) - { - vk::BufferMemoryBarrier bufferMemoryBarrier({}, vk::AccessFlagBits::eShaderRead, VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, *vertexBufferData.buffer, 0, - VK_WHOLE_SIZE); - commandBuffer->pipelineBarrier(vk::PipelineStageFlagBits::eAllCommands, vk::PipelineStageFlagBits::eAllCommands, {}, nullptr, bufferMemoryBarrier, nullptr); + vk::su::oneTimeSubmit( + device, perFrameData[0].commandPool, graphicsQueue, [&]( vk::UniqueCommandBuffer const & commandBuffer ) { + vk::BufferMemoryBarrier bufferMemoryBarrier( {}, + vk::AccessFlagBits::eShaderRead, + VK_QUEUE_FAMILY_IGNORED, + VK_QUEUE_FAMILY_IGNORED, + *vertexBufferData.buffer, + 0, + VK_WHOLE_SIZE ); + commandBuffer->pipelineBarrier( vk::PipelineStageFlagBits::eAllCommands, + vk::PipelineStageFlagBits::eAllCommands, + {}, + nullptr, + bufferMemoryBarrier, + nullptr ); - bufferMemoryBarrier.buffer = *indexBufferData.buffer; - commandBuffer->pipelineBarrier(vk::PipelineStageFlagBits::eAllCommands, vk::PipelineStageFlagBits::eAllCommands, {}, nullptr, bufferMemoryBarrier, nullptr); - }); + bufferMemoryBarrier.buffer = *indexBufferData.buffer; + commandBuffer->pipelineBarrier( vk::PipelineStageFlagBits::eAllCommands, + vk::PipelineStageFlagBits::eAllCommands, + {}, + nullptr, + bufferMemoryBarrier, + nullptr ); + } ); std::vector bindings; - bindings.push_back(vk::DescriptorSetLayoutBinding(0, vk::DescriptorType::eAccelerationStructureNV, 1, vk::ShaderStageFlagBits::eRaygenNV | vk::ShaderStageFlagBits::eClosestHitNV)); - bindings.push_back(vk::DescriptorSetLayoutBinding(1, vk::DescriptorType::eStorageImage, 1, vk::ShaderStageFlagBits::eRaygenNV)); // raytracing output - bindings.push_back(vk::DescriptorSetLayoutBinding(2, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eRaygenNV)); // camera information - bindings.push_back(vk::DescriptorSetLayoutBinding(3, vk::DescriptorType::eStorageBuffer, 1, vk::ShaderStageFlagBits::eClosestHitNV)); // vertex buffer - bindings.push_back(vk::DescriptorSetLayoutBinding(4, vk::DescriptorType::eStorageBuffer, 1, vk::ShaderStageFlagBits::eClosestHitNV)); // index buffer - bindings.push_back(vk::DescriptorSetLayoutBinding(5, vk::DescriptorType::eStorageBuffer, 1, vk::ShaderStageFlagBits::eClosestHitNV)); // material buffer - bindings.push_back(vk::DescriptorSetLayoutBinding(6, vk::DescriptorType::eCombinedImageSampler, vk::su::checked_cast(textures.size()), vk::ShaderStageFlagBits::eClosestHitNV)); // textures + bindings.push_back( + vk::DescriptorSetLayoutBinding( 0, + vk::DescriptorType::eAccelerationStructureNV, + 1, + vk::ShaderStageFlagBits::eRaygenNV | vk::ShaderStageFlagBits::eClosestHitNV ) ); + bindings.push_back( vk::DescriptorSetLayoutBinding( + 1, vk::DescriptorType::eStorageImage, 1, vk::ShaderStageFlagBits::eRaygenNV ) ); // raytracing output + bindings.push_back( vk::DescriptorSetLayoutBinding( + 
2, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eRaygenNV ) ); // camera information + bindings.push_back( vk::DescriptorSetLayoutBinding( + 3, vk::DescriptorType::eStorageBuffer, 1, vk::ShaderStageFlagBits::eClosestHitNV ) ); // vertex buffer + bindings.push_back( vk::DescriptorSetLayoutBinding( + 4, vk::DescriptorType::eStorageBuffer, 1, vk::ShaderStageFlagBits::eClosestHitNV ) ); // index buffer + bindings.push_back( vk::DescriptorSetLayoutBinding( + 5, vk::DescriptorType::eStorageBuffer, 1, vk::ShaderStageFlagBits::eClosestHitNV ) ); // material buffer + bindings.push_back( vk::DescriptorSetLayoutBinding( 6, + vk::DescriptorType::eCombinedImageSampler, + vk::su::checked_cast( textures.size() ), + vk::ShaderStageFlagBits::eClosestHitNV ) ); // textures std::vector descriptorPoolSizes; - descriptorPoolSizes.reserve(bindings.size()); - for (const auto& b : bindings) + descriptorPoolSizes.reserve( bindings.size() ); + for ( const auto & b : bindings ) { - descriptorPoolSizes.push_back(vk::DescriptorPoolSize(b.descriptorType, vk::su::checked_cast(swapChainData.images.size()) * b.descriptorCount)); + descriptorPoolSizes.push_back( vk::DescriptorPoolSize( + b.descriptorType, vk::su::checked_cast( swapChainData.images.size() ) * b.descriptorCount ) ); } - vk::DescriptorPoolCreateInfo descriptorPoolCreateInfo(vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, vk::su::checked_cast(swapChainData.images.size()), - vk::su::checked_cast(descriptorPoolSizes.size()), descriptorPoolSizes.data()); - vk::UniqueDescriptorPool rayTracingDescriptorPool = device->createDescriptorPoolUnique(descriptorPoolCreateInfo); - vk::UniqueDescriptorSetLayout rayTracingDescriptorSetLayout = device->createDescriptorSetLayoutUnique(vk::DescriptorSetLayoutCreateInfo({}, static_cast(bindings.size()), - bindings.data())); + vk::DescriptorPoolCreateInfo descriptorPoolCreateInfo( + vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, + vk::su::checked_cast( swapChainData.images.size() ), + vk::su::checked_cast( descriptorPoolSizes.size() ), + descriptorPoolSizes.data() ); + vk::UniqueDescriptorPool rayTracingDescriptorPool = device->createDescriptorPoolUnique( descriptorPoolCreateInfo ); + vk::UniqueDescriptorSetLayout rayTracingDescriptorSetLayout = device->createDescriptorSetLayoutUnique( + vk::DescriptorSetLayoutCreateInfo( {}, static_cast( bindings.size() ), bindings.data() ) ); std::vector layouts; - for (size_t i = 0; i < swapChainData.images.size(); i++) + for ( size_t i = 0; i < swapChainData.images.size(); i++ ) { - layouts.push_back(*rayTracingDescriptorSetLayout); + layouts.push_back( *rayTracingDescriptorSetLayout ); } - vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo(*rayTracingDescriptorPool, vk::su::checked_cast(layouts.size()), layouts.data()); - std::vector rayTracingDescriptorSets = device->allocateDescriptorSetsUnique(descriptorSetAllocateInfo); + vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( + *rayTracingDescriptorPool, vk::su::checked_cast( layouts.size() ), layouts.data() ); + std::vector rayTracingDescriptorSets = + device->allocateDescriptorSetsUnique( descriptorSetAllocateInfo ); // Bind ray tracing specific descriptor sets into pNext of a vk::WriteDescriptorSet - vk::WriteDescriptorSetAccelerationStructureNV writeDescriptorSetAcceleration(1, &*topLevelAS.acclerationStructure); - std::vector accelerationDescriptionSets; - for (size_t i = 0; i < rayTracingDescriptorSets.size(); i++) + vk::WriteDescriptorSetAccelerationStructureNV writeDescriptorSetAcceleration( 
1, + &*topLevelAS.acclerationStructure ); + std::vector accelerationDescriptionSets; + for ( size_t i = 0; i < rayTracingDescriptorSets.size(); i++ ) { - accelerationDescriptionSets.push_back(vk::WriteDescriptorSet(*rayTracingDescriptorSets[i], 0, 0, 1, bindings[0].descriptorType)); + accelerationDescriptionSets.push_back( + vk::WriteDescriptorSet( *rayTracingDescriptorSets[i], 0, 0, 1, bindings[0].descriptorType ) ); accelerationDescriptionSets.back().pNext = &writeDescriptorSetAcceleration; } - device->updateDescriptorSets(accelerationDescriptionSets, {}); + device->updateDescriptorSets( accelerationDescriptionSets, {} ); - // Bind all the other buffers and images, starting with dstBinding == 2 (dstBinding == 1 is used by the backBuffer view) - for (size_t i = 0; i < rayTracingDescriptorSets.size(); i++) + // Bind all the other buffers and images, starting with dstBinding == 2 (dstBinding == 1 is used by the backBuffer + // view) + for ( size_t i = 0; i < rayTracingDescriptorSets.size(); i++ ) { - vk::su::updateDescriptorSets(device, rayTracingDescriptorSets[i], - { - { bindings[2].descriptorType, uniformBufferData.buffer, vk::UniqueBufferView() }, - { bindings[3].descriptorType, vertexBufferData.buffer, vk::UniqueBufferView() }, - { bindings[4].descriptorType, indexBufferData.buffer, vk::UniqueBufferView() }, - { bindings[5].descriptorType, materialBufferData.buffer, vk::UniqueBufferView() } - }, textures, 2); + vk::su::updateDescriptorSets( + device, + rayTracingDescriptorSets[i], + { { bindings[2].descriptorType, uniformBufferData.buffer, vk::UniqueBufferView() }, + { bindings[3].descriptorType, vertexBufferData.buffer, vk::UniqueBufferView() }, + { bindings[4].descriptorType, indexBufferData.buffer, vk::UniqueBufferView() }, + { bindings[5].descriptorType, materialBufferData.buffer, vk::UniqueBufferView() } }, + textures, + 2 ); } // create the ray-tracing shader modules glslang::InitializeProcess(); - vk::UniqueShaderModule raygenShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eRaygenNV, raygenShaderText); - vk::UniqueShaderModule missShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eMissNV, missShaderText); - vk::UniqueShaderModule shadowMissShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eMissNV, shadowMissShaderText); - vk::UniqueShaderModule closestHitShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eClosestHitNV, closestHitShaderText); + vk::UniqueShaderModule raygenShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eRaygenNV, raygenShaderText ); + vk::UniqueShaderModule missShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eMissNV, missShaderText ); + vk::UniqueShaderModule shadowMissShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eMissNV, shadowMissShaderText ); + vk::UniqueShaderModule closestHitShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eClosestHitNV, closestHitShaderText ); glslang::FinalizeProcess(); // create the ray tracing pipeline - std::vector shaderStages; + std::vector shaderStages; std::vector shaderGroups; // We use only one ray generation, that will implement the camera model - shaderStages.push_back(vk::PipelineShaderStageCreateInfo({}, vk::ShaderStageFlagBits::eRaygenNV, *raygenShaderModule, "main")); - shaderGroups.push_back(vk::RayTracingShaderGroupCreateInfoNV(vk::RayTracingShaderGroupTypeNV::eGeneral, 0, VK_SHADER_UNUSED_NV, 
VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV));
+  shaderStages.push_back(
+    vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eRaygenNV, *raygenShaderModule, "main" ) );
+  shaderGroups.push_back( vk::RayTracingShaderGroupCreateInfoNV(
+    vk::RayTracingShaderGroupTypeNV::eGeneral, 0, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV ) );
   // The first miss shader is used to look up the environment in case the rays from the camera miss the geometry
-  shaderStages.push_back(vk::PipelineShaderStageCreateInfo({}, vk::ShaderStageFlagBits::eMissNV, *missShaderModule, "main"));
-  shaderGroups.push_back(vk::RayTracingShaderGroupCreateInfoNV(vk::RayTracingShaderGroupTypeNV::eGeneral, 1, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV));
+  shaderStages.push_back(
+    vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eMissNV, *missShaderModule, "main" ) );
+  shaderGroups.push_back( vk::RayTracingShaderGroupCreateInfoNV(
+    vk::RayTracingShaderGroupTypeNV::eGeneral, 1, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV ) );
-  // The second miss shader is invoked when a shadow ray misses the geometry. It simply indicates that no occlusion has been found
-  shaderStages.push_back(vk::PipelineShaderStageCreateInfo({}, vk::ShaderStageFlagBits::eMissNV, *shadowMissShaderModule, "main"));
-  shaderGroups.push_back(vk::RayTracingShaderGroupCreateInfoNV(vk::RayTracingShaderGroupTypeNV::eGeneral, 2, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV));
+  // The second miss shader is invoked when a shadow ray misses the geometry. It simply indicates that no occlusion
+  // has been found
+  shaderStages.push_back(
+    vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eMissNV, *shadowMissShaderModule, "main" ) );
+  shaderGroups.push_back( vk::RayTracingShaderGroupCreateInfoNV(
+    vk::RayTracingShaderGroupTypeNV::eGeneral, 2, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV ) );
-  // The first hit group defines the shaders invoked when a ray shot from the camera hits the geometry. In this case we only specify the closest hit
-  // shader, and rely on the built-in triangle intersection and the pass-through any-hit shader. However, explicit intersection and any-hit shaders could be added as well.
-  shaderStages.push_back(vk::PipelineShaderStageCreateInfo({}, vk::ShaderStageFlagBits::eClosestHitNV, *closestHitShaderModule, "main"));
-  shaderGroups.push_back(vk::RayTracingShaderGroupCreateInfoNV(vk::RayTracingShaderGroupTypeNV::eTrianglesHitGroup, VK_SHADER_UNUSED_NV, 3, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV));
+  // The first hit group defines the shaders invoked when a ray shot from the camera hits the geometry. In this case we
+  // only specify the closest hit shader, and rely on the built-in triangle intersection and the pass-through any-hit
+  // shader. However, explicit intersection and any-hit shaders could be added as well.
+  shaderStages.push_back( vk::PipelineShaderStageCreateInfo(
+    {}, vk::ShaderStageFlagBits::eClosestHitNV, *closestHitShaderModule, "main" ) );
+  shaderGroups.push_back( vk::RayTracingShaderGroupCreateInfoNV( vk::RayTracingShaderGroupTypeNV::eTrianglesHitGroup,
+                                                                 VK_SHADER_UNUSED_NV,
+                                                                 3,
+                                                                 VK_SHADER_UNUSED_NV,
+                                                                 VK_SHADER_UNUSED_NV ) );
-  // The second hit group defines the shaders invoked when a shadow ray hits the geometry.
For simple shadows we do not need any shader in that group: we will rely on - // initializing the payload and update it only in the miss shader - shaderGroups.push_back(vk::RayTracingShaderGroupCreateInfoNV(vk::RayTracingShaderGroupTypeNV::eTrianglesHitGroup, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV)); + // The second hit group defines the shaders invoked when a shadow ray hits the geometry. For simple shadows we do + // not need any shader in that group: we will rely on initializing the payload and update it only in the miss shader + shaderGroups.push_back( vk::RayTracingShaderGroupCreateInfoNV( vk::RayTracingShaderGroupTypeNV::eTrianglesHitGroup, + VK_SHADER_UNUSED_NV, + VK_SHADER_UNUSED_NV, + VK_SHADER_UNUSED_NV, + VK_SHADER_UNUSED_NV ) ); // Create the layout of the pipeline following the provided descriptor set layout - vk::UniquePipelineLayout rayTracingPipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo({}, 1, &*rayTracingDescriptorSetLayout)); + vk::UniquePipelineLayout rayTracingPipelineLayout = + device->createPipelineLayoutUnique( vk::PipelineLayoutCreateInfo( {}, 1, &*rayTracingDescriptorSetLayout ) ); // Assemble the shader stages and recursion depth info into the raytracing pipeline // The ray tracing process can shoot rays from the camera, and a shadow ray can be shot from the // hit points of the camera rays, hence a recursion level of 2. This number should be kept as low // as possible for performance reasons. Even recursive ray tracing should be flattened into a loop // in the ray generation to avoid deep recursion. - uint32_t maxRecursionDepth = 2; - vk::RayTracingPipelineCreateInfoNV rayTracingPipelineCreateInfo({}, static_cast(shaderStages.size()), shaderStages.data(), static_cast(shaderGroups.size()), - shaderGroups.data(), maxRecursionDepth, *rayTracingPipelineLayout); - vk::UniquePipeline rayTracingPipeline = device->createRayTracingPipelineNVUnique(nullptr, rayTracingPipelineCreateInfo); + uint32_t maxRecursionDepth = 2; + vk::RayTracingPipelineCreateInfoNV rayTracingPipelineCreateInfo( {}, + static_cast( shaderStages.size() ), + shaderStages.data(), + static_cast( shaderGroups.size() ), + shaderGroups.data(), + maxRecursionDepth, + *rayTracingPipelineLayout ); + vk::UniquePipeline rayTracingPipeline = + device->createRayTracingPipelineNVUnique( nullptr, rayTracingPipelineCreateInfo ); - uint32_t shaderGroupHandleSize = physicalDevice.getProperties2().get().shaderGroupHandleSize; - assert(!(shaderGroupHandleSize % 16)); - uint32_t shaderBindingTableSize = 5 * shaderGroupHandleSize; // 1x raygen, 2x miss, 2x hitGroup + uint32_t shaderGroupHandleSize = + physicalDevice.getProperties2() + .get() + .shaderGroupHandleSize; + assert( !( shaderGroupHandleSize % 16 ) ); + uint32_t shaderBindingTableSize = 5 * shaderGroupHandleSize; // 1x raygen, 2x miss, 2x hitGroup // with 5 shaders, we need a buffer to hold 5 shaderGroupHandles - std::vector shaderHandleStorage(shaderBindingTableSize); - device->getRayTracingShaderGroupHandlesNV(*rayTracingPipeline, 0, 5, shaderHandleStorage); + std::vector shaderHandleStorage( shaderBindingTableSize ); + device->getRayTracingShaderGroupHandlesNV( *rayTracingPipeline, 0, 5, shaderHandleStorage ); - vk::su::BufferData shaderBindingTableBufferData(physicalDevice, device, shaderBindingTableSize, vk::BufferUsageFlagBits::eTransferDst, vk::MemoryPropertyFlagBits::eHostVisible); - shaderBindingTableBufferData.upload(device, shaderHandleStorage); + vk::su::BufferData 
shaderBindingTableBufferData( physicalDevice, + device, + shaderBindingTableSize, + vk::BufferUsageFlagBits::eTransferDst, + vk::MemoryPropertyFlagBits::eHostVisible ); + shaderBindingTableBufferData.upload( device, shaderHandleStorage ); std::array clearValues; - clearValues[0].color = vk::ClearColorValue(std::array({0.2f, 0.2f, 0.2f, 0.2f})); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); // Main loop - uint32_t frameIndex = 0; + uint32_t frameIndex = 0; UniformBufferObject uniformBufferObject; - uniformBufferObject.model = glm::mat4(1); - uniformBufferObject.modelIT = glm::inverseTranspose(uniformBufferObject.model); + uniformBufferObject.model = glm::mat4( 1 ); + uniformBufferObject.modelIT = glm::inverseTranspose( uniformBufferObject.model ); - double accumulatedTime{0.0}; - size_t frameCount{0}; - while (!glfwWindowShouldClose(window)) + double accumulatedTime{ 0.0 }; + size_t frameCount{ 0 }; + while ( !glfwWindowShouldClose( window ) ) { double startTime = glfwGetTime(); glfwPollEvents(); - vk::UniqueCommandBuffer const& commandBuffer = perFrameData[frameIndex].commandBuffer; + vk::UniqueCommandBuffer const & commandBuffer = perFrameData[frameIndex].commandBuffer; int w, h; - glfwGetWindowSize(window, &w, &h); - if ((w != static_cast(windowExtent.width)) || (h != static_cast(windowExtent.height))) + glfwGetWindowSize( window, &w, &h ); + if ( ( w != static_cast( windowExtent.width ) ) || ( h != static_cast( windowExtent.height ) ) ) { - windowExtent.width = w; + windowExtent.width = w; windowExtent.height = h; device->waitIdle(); - swapChainData = vk::su::SwapChainData(physicalDevice, device, surface, windowExtent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eStorage, swapChainData.swapChain, - graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); - depthBufferData = vk::su::DepthBufferData(physicalDevice, device, vk::su::pickDepthFormat(physicalDevice), windowExtent); + swapChainData = + vk::su::SwapChainData( physicalDevice, + device, + surface, + windowExtent, + vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eStorage, + swapChainData.swapChain, + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); + depthBufferData = + vk::su::DepthBufferData( physicalDevice, device, vk::su::pickDepthFormat( physicalDevice ), windowExtent ); - vk::su::oneTimeSubmit(commandBuffer, graphicsQueue, - [&](vk::UniqueCommandBuffer const& commandBuffer) - { - vk::su::setImageLayout(commandBuffer, *depthBufferData.image, depthFormat, vk::ImageLayout::eUndefined, vk::ImageLayout::eDepthStencilAttachmentOptimal); - }); + vk::su::oneTimeSubmit( commandBuffer, graphicsQueue, [&]( vk::UniqueCommandBuffer const & commandBuffer ) { + vk::su::setImageLayout( commandBuffer, + *depthBufferData.image, + depthFormat, + vk::ImageLayout::eUndefined, + vk::ImageLayout::eDepthStencilAttachmentOptimal ); + } ); - framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, windowExtent); + framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, windowExtent ); } // update the uniformBufferObject - assert(0 < windowExtent.height); + assert( 0 < windowExtent.height ); uniformBufferObject.view = 
appInfo.cameraManipulator.getMatrix(); - uniformBufferObject.proj = glm::perspective(glm::radians(65.0f), windowExtent.width / static_cast(windowExtent.height), 0.1f, 1000.0f); + uniformBufferObject.proj = glm::perspective( + glm::radians( 65.0f ), windowExtent.width / static_cast( windowExtent.height ), 0.1f, 1000.0f ); uniformBufferObject.proj[1][1] *= -1; // Inverting Y for Vulkan - uniformBufferObject.viewInverse = glm::inverse(uniformBufferObject.view); - uniformBufferObject.projInverse = glm::inverse(uniformBufferObject.proj); - uniformBufferData.upload(device, uniformBufferObject); + uniformBufferObject.viewInverse = glm::inverse( uniformBufferObject.view ); + uniformBufferObject.projInverse = glm::inverse( uniformBufferObject.proj ); + uniformBufferData.upload( device, uniformBufferObject ); // frame begin - vk::ResultValue rv = device->acquireNextImageKHR(*swapChainData.swapChain, UINT64_MAX, *perFrameData[frameIndex].presentCompleteSemaphore, nullptr); - assert(rv.result == vk::Result::eSuccess); + vk::ResultValue rv = device->acquireNextImageKHR( + *swapChainData.swapChain, UINT64_MAX, *perFrameData[frameIndex].presentCompleteSemaphore, nullptr ); + assert( rv.result == vk::Result::eSuccess ); uint32_t backBufferIndex = rv.value; - while (vk::Result::eTimeout == device->waitForFences(*perFrameData[frameIndex].fence, VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == + device->waitForFences( *perFrameData[frameIndex].fence, VK_TRUE, vk::su::FenceTimeout ) ) ; - device->resetFences(*perFrameData[frameIndex].fence); + device->resetFences( *perFrameData[frameIndex].fence ); - commandBuffer->begin(vk::CommandBufferBeginInfo(vk::CommandBufferUsageFlagBits::eOneTimeSubmit)); + commandBuffer->begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) ); - if (appInfo.useRasterRender) + if ( appInfo.useRasterRender ) { - commandBuffer->beginRenderPass(vk::RenderPassBeginInfo(*renderPass, *framebuffers[backBufferIndex], vk::Rect2D(vk::Offset2D(0, 0), windowExtent), - static_cast(clearValues.size()), clearValues.data()), vk::SubpassContents::eInline); + commandBuffer->beginRenderPass( vk::RenderPassBeginInfo( *renderPass, + *framebuffers[backBufferIndex], + vk::Rect2D( vk::Offset2D( 0, 0 ), windowExtent ), + static_cast( clearValues.size() ), + clearValues.data() ), + vk::SubpassContents::eInline ); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, *graphicsPipeline); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, *descriptorSet, nullptr); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline ); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, *descriptorSet, nullptr ); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast(windowExtent.width), static_cast(windowExtent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), windowExtent)); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast( windowExtent.width ), + static_cast( windowExtent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), windowExtent ) ); - commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0}); - commandBuffer->bindIndexBuffer(*indexBufferData.buffer, 0, vk::IndexType::eUint32); - commandBuffer->drawIndexed(vk::su::checked_cast(indices.size()), 1, 0, 0, 0); + commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } ); 
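A quick aside on the projection set up a few lines above: glm::perspective produces an OpenGL-style clip space whose Y axis points up, while Vulkan's clip space Y points down, which is why the sample negates proj[1][1]. A minimal sketch of that step (the helper name vulkanPerspective is illustrative and not part of the sample; glm is already used throughout these samples):

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

glm::mat4 vulkanPerspective( float fovyRadians, float width, float height, float zNear, float zFar )
{
  // GL-style projection first ...
  glm::mat4 proj = glm::perspective( fovyRadians, width / height, zNear, zFar );
  // ... then flip Y for Vulkan's downward-pointing clip-space Y axis.
  proj[1][1] *= -1.0f;
  return proj;
}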
+ commandBuffer->bindIndexBuffer( *indexBufferData.buffer, 0, vk::IndexType::eUint32 ); + commandBuffer->drawIndexed( vk::su::checked_cast( indices.size() ), 1, 0, 0, 0 ); commandBuffer->endRenderPass(); } else { - vk::DescriptorImageInfo imageInfo(nullptr, *swapChainData.imageViews[backBufferIndex], vk::ImageLayout::eGeneral); - device->updateDescriptorSets(vk::WriteDescriptorSet(*rayTracingDescriptorSets[backBufferIndex], 1, 0, 1, bindings[1].descriptorType, &imageInfo), {}); + vk::DescriptorImageInfo imageInfo( + nullptr, *swapChainData.imageViews[backBufferIndex], vk::ImageLayout::eGeneral ); + device->updateDescriptorSets( + vk::WriteDescriptorSet( + *rayTracingDescriptorSets[backBufferIndex], 1, 0, 1, bindings[1].descriptorType, &imageInfo ), + {} ); - vk::su::setImageLayout(commandBuffer, swapChainData.images[backBufferIndex], surfaceFormat.format, vk::ImageLayout::eUndefined, vk::ImageLayout::eGeneral); + vk::su::setImageLayout( commandBuffer, + swapChainData.images[backBufferIndex], + surfaceFormat.format, + vk::ImageLayout::eUndefined, + vk::ImageLayout::eGeneral ); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eRayTracingNV, *rayTracingPipeline); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eRayTracingNV, *rayTracingPipeline ); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eRayTracingNV, *rayTracingPipelineLayout, 0, *rayTracingDescriptorSets[backBufferIndex], nullptr); + commandBuffer->bindDescriptorSets( vk::PipelineBindPoint::eRayTracingNV, + *rayTracingPipelineLayout, + 0, + *rayTracingDescriptorSets[backBufferIndex], + nullptr ); - VkDeviceSize rayGenOffset = 0; // starting with raygen - VkDeviceSize missOffset = shaderGroupHandleSize; // after raygen - VkDeviceSize missStride = shaderGroupHandleSize; - VkDeviceSize hitGroupOffset = shaderGroupHandleSize + 2 * shaderGroupHandleSize; // after 1x raygen and 2x miss + VkDeviceSize rayGenOffset = 0; // starting with raygen + VkDeviceSize missOffset = shaderGroupHandleSize; // after raygen + VkDeviceSize missStride = shaderGroupHandleSize; + VkDeviceSize hitGroupOffset = shaderGroupHandleSize + 2 * shaderGroupHandleSize; // after 1x raygen and 2x miss VkDeviceSize hitGroupStride = shaderGroupHandleSize; - commandBuffer->traceRaysNV(*shaderBindingTableBufferData.buffer, rayGenOffset, *shaderBindingTableBufferData.buffer, missOffset, missStride, *shaderBindingTableBufferData.buffer, - hitGroupOffset, hitGroupStride, nullptr, 0, 0, windowExtent.width, windowExtent.height, 1); + commandBuffer->traceRaysNV( *shaderBindingTableBufferData.buffer, + rayGenOffset, + *shaderBindingTableBufferData.buffer, + missOffset, + missStride, + *shaderBindingTableBufferData.buffer, + hitGroupOffset, + hitGroupStride, + nullptr, + 0, + 0, + windowExtent.width, + windowExtent.height, + 1 ); - vk::su::setImageLayout(commandBuffer, swapChainData.images[backBufferIndex], surfaceFormat.format, vk::ImageLayout::eGeneral, vk::ImageLayout::ePresentSrcKHR); + vk::su::setImageLayout( commandBuffer, + swapChainData.images[backBufferIndex], + surfaceFormat.format, + vk::ImageLayout::eGeneral, + vk::ImageLayout::ePresentSrcKHR ); } // frame end commandBuffer->end(); const vk::PipelineStageFlags waitDstStageMask = vk::PipelineStageFlagBits::eColorAttachmentOutput; - graphicsQueue.submit(vk::SubmitInfo(1, &(*perFrameData[frameIndex].presentCompleteSemaphore), &waitDstStageMask, 1, &(*commandBuffer), 1, - &(*perFrameData[frameIndex].renderCompleteSemaphore)), *perFrameData[frameIndex].fence); - 
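The offsets passed to traceRaysNV above follow from the order in which the five shader groups were pushed: one raygen group, two miss groups, then two hit groups, each occupying one shaderGroupHandleSize slot in the shader binding table. A small sketch of that arithmetic (SbtOffsets and computeSbtOffsets are illustrative names only):

#include <cstdint>

// group 0: raygen | group 1: miss | group 2: shadow miss | group 3: hit group | group 4: shadow hit group
struct SbtOffsets
{
  uint64_t rayGen, miss, missStride, hitGroup, hitGroupStride;
};

SbtOffsets computeSbtOffsets( uint32_t handleSize )
{
  return { 0,                  // the raygen handle sits at the start of the table
           1ull * handleSize,  // the two miss handles follow the raygen handle
           handleSize,         // miss records are packed back to back
           3ull * handleSize,  // hit groups follow 1x raygen + 2x miss
           handleSize };       // hit-group records are packed back to back
}

// e.g. handleSize == 32 puts the miss region at byte 32 and the hit-group region at byte 96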
presentQueue.presentKHR(vk::PresentInfoKHR(1, &(*perFrameData[frameIndex].renderCompleteSemaphore), 1, &(*swapChainData.swapChain), &backBufferIndex)); - frameIndex = (frameIndex + 1) % IMGUI_VK_QUEUED_FRAMES; + graphicsQueue.submit( vk::SubmitInfo( 1, + &( *perFrameData[frameIndex].presentCompleteSemaphore ), + &waitDstStageMask, + 1, + &( *commandBuffer ), + 1, + &( *perFrameData[frameIndex].renderCompleteSemaphore ) ), + *perFrameData[frameIndex].fence ); + presentQueue.presentKHR( vk::PresentInfoKHR( 1, + &( *perFrameData[frameIndex].renderCompleteSemaphore ), + 1, + &( *swapChainData.swapChain ), + &backBufferIndex ) ); + frameIndex = ( frameIndex + 1 ) % IMGUI_VK_QUEUED_FRAMES; double endTime = glfwGetTime(); accumulatedTime += endTime - startTime; ++frameCount; - if (1.0 < accumulatedTime) + if ( 1.0 < accumulatedTime ) { - assert(0 < frameCount); + assert( 0 < frameCount ); std::ostringstream oss; - oss << AppName << ": " << vertices.size() << " Vertices " << (appInfo.useRasterRender ? "Rastering" : "RayTracing") << " ( " << frameCount / accumulatedTime << " fps)"; - glfwSetWindowTitle(window, oss.str().c_str()); + oss << AppName << ": " << vertices.size() << " Vertices " + << ( appInfo.useRasterRender ? "Rastering" : "RayTracing" ) << " ( " << frameCount / accumulatedTime + << " fps)"; + glfwSetWindowTitle( window, oss.str().c_str() ); accumulatedTime = 0.0; - frameCount = 0; + frameCount = 0; } } // Cleanup device->waitIdle(); - swapChainData.swapChain.reset(); // need to reset swapChain before destroying the surface ! - VULKAN_HPP_DEFAULT_DISPATCHER.vkDestroySurfaceKHR(VkInstance(*instance), VkSurfaceKHR(surface), nullptr); - glfwDestroyWindow(window); + swapChainData.swapChain.reset(); // need to reset swapChain before destroying the surface ! + VULKAN_HPP_DEFAULT_DISPATCHER.vkDestroySurfaceKHR( VkInstance( *instance ), VkSurfaceKHR( surface ), nullptr ); + glfwDestroyWindow( window ); glfwTerminate(); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/SecondaryCommandBuffer/SecondaryCommandBuffer.cpp b/samples/SecondaryCommandBuffer/SecondaryCommandBuffer.cpp index a54edaf..a5c75f9 100644 --- a/samples/SecondaryCommandBuffer/SecondaryCommandBuffer.cpp +++ b/samples/SecondaryCommandBuffer/SecondaryCommandBuffer.cpp @@ -19,167 +19,241 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "SecondaryCommandBuffer"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "SecondaryCommandBuffer"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + 
surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(surfaceData.extent)); + vk::su::BufferData uniformBufferData( + physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( + device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( surfaceData.extent ) ); - vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, - { {vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex}, {vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment} }); - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout( + device, + { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex }, + { vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment } } ); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format, - vk::AttachmentLoadOp::eClear, vk::ImageLayout::eColorAttachmentOptimal); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + depthBufferData.format, + vk::AttachmentLoadOp::eClear, + vk::ImageLayout::eColorAttachmentOptimal ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent ); - vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(texturedCubeData), vk::BufferUsageFlagBits::eVertexBuffer); - vk::su::copyToDevice(device, vertexBufferData.deviceMemory, texturedCubeData, 
sizeof(texturedCubeData) / sizeof(texturedCubeData[0])); + vk::su::BufferData vertexBufferData( + physicalDevice, device, sizeof( texturedCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ); + vk::su::copyToDevice( device, + vertexBufferData.deviceMemory, + texturedCubeData, + sizeof( texturedCubeData ) / sizeof( texturedCubeData[0] ) ); - vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() ); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr), - sizeof(texturedCubeData[0]), { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, - vk::FrontFace::eClockwise, true, pipelineLayout, renderPass); + vk::UniquePipeline graphicsPipeline = + vk::su::createGraphicsPipeline( device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + sizeof( texturedCubeData[0] ), + { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, + vk::FrontFace::eClockwise, + true, + pipelineLayout, + renderPass ); - commandBuffer->begin(vk::CommandBufferBeginInfo()); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); - vk::su::TextureData greenTextureData(physicalDevice, device); - greenTextureData.setImage(device, commandBuffer, vk::su::MonochromeImageGenerator({ 118, 185, 0 })); + vk::su::TextureData greenTextureData( physicalDevice, device ); + greenTextureData.setImage( device, commandBuffer, vk::su::MonochromeImageGenerator( { 118, 185, 0 } ) ); - vk::su::TextureData checkeredTextureData(physicalDevice, device); - checkeredTextureData.setImage(device, commandBuffer, vk::su::CheckerboardImageGenerator()); + vk::su::TextureData checkeredTextureData( physicalDevice, device ); + checkeredTextureData.setImage( device, commandBuffer, vk::su::CheckerboardImageGenerator() ); // create two identical descriptor sets, each with a different texture but identical UBOs - vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool(device, { {vk::DescriptorType::eUniformBuffer, 2}, {vk::DescriptorType::eCombinedImageSampler, 2} }); + vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool( + device, { { vk::DescriptorType::eUniformBuffer, 2 }, { vk::DescriptorType::eCombinedImageSampler, 2 } } ); - vk::DescriptorSetLayout layouts[] = { descriptorSetLayout.get(), descriptorSetLayout.get() }; - std::vector descriptorSets = device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(descriptorPool.get(), 2, layouts)); - assert(descriptorSets.size() == 2); + vk::DescriptorSetLayout layouts[] = { descriptorSetLayout.get(), descriptorSetLayout.get() }; + std::vector descriptorSets = + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( descriptorPool.get(), 2, layouts ) ); + assert( descriptorSets.size() == 2 ); - vk::su::updateDescriptorSets(device, descriptorSets[0], {{vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView()}}, greenTextureData); - vk::su::updateDescriptorSets(device, descriptorSets[1], {{vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView()}}, checkeredTextureData); + vk::su::updateDescriptorSets( + device, + descriptorSets[0], + { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, 
vk::UniqueBufferView() } }, + greenTextureData ); + vk::su::updateDescriptorSets( + device, + descriptorSets[1], + { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView() } }, + checkeredTextureData ); /* VULKAN_KEY_START */ // create four secondary command buffers, for each quadrant of the screen - std::vector secondaryCommandBuffers = device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::eSecondary, 4)); + std::vector secondaryCommandBuffers = device->allocateCommandBuffersUnique( + vk::CommandBufferAllocateInfo( commandPool.get(), vk::CommandBufferLevel::eSecondary, 4 ) ); // Get the index of the next available swapchain image: - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); + vk::ResultValue currentBuffer = device->acquireNextImageKHR( + swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); - vk::su::setImageLayout(commandBuffer, swapChainData.images[currentBuffer.value], swapChainData.colorFormat, vk::ImageLayout::eUndefined, vk::ImageLayout::eColorAttachmentOptimal); + vk::su::setImageLayout( commandBuffer, + swapChainData.images[currentBuffer.value], + swapChainData.colorFormat, + vk::ImageLayout::eUndefined, + vk::ImageLayout::eColorAttachmentOptimal ); const vk::DeviceSize offset = 0; - vk::Viewport viewport(0.0f, 0.0f, 200.0f, 200.0f, 0.0f, 1.0f); - vk::Rect2D scissor(vk::Offset2D(0, 0), vk::Extent2D(surfaceData.extent)); + vk::Viewport viewport( 0.0f, 0.0f, 200.0f, 200.0f, 0.0f, 1.0f ); + vk::Rect2D scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( surfaceData.extent ) ); // now we record four separate command buffers, one for each quadrant of the screen - vk::CommandBufferInheritanceInfo commandBufferInheritanceInfo(renderPass.get(), 0, framebuffers[currentBuffer.value].get()); - vk::CommandBufferBeginInfo secondaryBeginInfo(vk::CommandBufferUsageFlagBits::eOneTimeSubmit | vk::CommandBufferUsageFlagBits::eRenderPassContinue, &commandBufferInheritanceInfo); + vk::CommandBufferInheritanceInfo commandBufferInheritanceInfo( + renderPass.get(), 0, framebuffers[currentBuffer.value].get() ); + vk::CommandBufferBeginInfo secondaryBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit | + vk::CommandBufferUsageFlagBits::eRenderPassContinue, + &commandBufferInheritanceInfo ); - for (int i = 0; i < 4; i++) + for ( int i = 0; i < 4; i++ ) { - viewport.x = 25.0f + 250.0f * (i % 2); - viewport.y = 25.0f + 250.0f * (i / 2); + viewport.x = 25.0f + 250.0f * ( i % 2 ); + viewport.y = 25.0f + 250.0f * ( i / 2 ); - secondaryCommandBuffers[i]->begin(secondaryBeginInfo); - secondaryCommandBuffers[i]->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get()); - secondaryCommandBuffers[i]->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSets[i == 0 || i == 3].get(), nullptr); - secondaryCommandBuffers[i]->bindVertexBuffers(0, vertexBufferData.buffer.get(), offset); - 
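The viewport arithmetic in the loop above places each secondary command buffer in one quadrant of the 500 x 500 surface, using a 25-pixel margin and a 250-pixel stride; QuadrantOrigin and quadrantOrigin below are illustrative names for the same mapping:

// i = 0 -> ( 25,  25)   i = 1 -> (275,  25)
// i = 2 -> ( 25, 275)   i = 3 -> (275, 275)
struct QuadrantOrigin
{
  float x, y;
};

QuadrantOrigin quadrantOrigin( int i )
{
  return { 25.0f + 250.0f * ( i % 2 ),    // column index: i % 2 is 0 or 1
           25.0f + 250.0f * ( i / 2 ) };  // row index:    i / 2 is 0 or 1
}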
secondaryCommandBuffers[i]->setViewport(0, viewport); - secondaryCommandBuffers[i]->setScissor(0, scissor); - secondaryCommandBuffers[i]->draw(12 * 3, 1, 0, 0); + secondaryCommandBuffers[i]->begin( secondaryBeginInfo ); + secondaryCommandBuffers[i]->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); + secondaryCommandBuffers[i]->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSets[i == 0 || i == 3].get(), nullptr ); + secondaryCommandBuffers[i]->bindVertexBuffers( 0, vertexBufferData.buffer.get(), offset ); + secondaryCommandBuffers[i]->setViewport( 0, viewport ); + secondaryCommandBuffers[i]->setScissor( 0, scissor ); + secondaryCommandBuffers[i]->draw( 12 * 3, 1, 0, 0 ); secondaryCommandBuffers[i]->end(); } - vk::ClearValue clearValues[2]; - clearValues[0].color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); - vk::RenderPassBeginInfo renderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 2, clearValues); - // specifying VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS means this render pass may ONLY call vkCmdExecuteCommands - commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eSecondaryCommandBuffers); - commandBuffer->executeCommands(vk::uniqueToRaw(secondaryCommandBuffers)); + vk::RenderPassBeginInfo renderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), + 2, + clearValues ); + // specifying VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS means this render pass may ONLY call + // vkCmdExecuteCommands + commandBuffer->beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eSecondaryCommandBuffers ); + commandBuffer->executeCommands( vk::uniqueToRaw( secondaryCommandBuffers ) ); commandBuffer->endRenderPass(); - vk::ImageMemoryBarrier prePresentBarrier(vk::AccessFlagBits::eColorAttachmentWrite, vk::AccessFlagBits::eMemoryRead, vk::ImageLayout::eColorAttachmentOptimal, vk::ImageLayout::ePresentSrcKHR, - VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, swapChainData.images[currentBuffer.value], vk::ImageSubresourceRange(vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1)); - commandBuffer->pipelineBarrier(vk::PipelineStageFlagBits::eColorAttachmentOutput, vk::PipelineStageFlagBits::eBottomOfPipe, vk::DependencyFlags(), nullptr, nullptr, prePresentBarrier); + vk::ImageMemoryBarrier prePresentBarrier( + vk::AccessFlagBits::eColorAttachmentWrite, + vk::AccessFlagBits::eMemoryRead, + vk::ImageLayout::eColorAttachmentOptimal, + vk::ImageLayout::ePresentSrcKHR, + VK_QUEUE_FAMILY_IGNORED, + VK_QUEUE_FAMILY_IGNORED, + swapChainData.images[currentBuffer.value], + vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) ); + commandBuffer->pipelineBarrier( vk::PipelineStageFlagBits::eColorAttachmentOutput, + vk::PipelineStageFlagBits::eBottomOfPipe, + vk::DependencyFlags(), + nullptr, + nullptr, + prePresentBarrier ); commandBuffer->end(); - vk::UniqueFence drawFence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence drawFence = device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - 
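Restating the rule from the comment above as a sketch: once the render pass is begun with vk::SubpassContents::eSecondaryCommandBuffers, the primary command buffer records no work of its own inside the pass and only executes the pre-recorded secondary command buffers (recordPrimary is an illustrative helper, not part of the sample):

#include <vector>
#include <vulkan/vulkan.hpp>

void recordPrimary( vk::CommandBuffer                      primary,
                    vk::RenderPassBeginInfo const &        renderPassBeginInfo,
                    std::vector<vk::CommandBuffer> const & secondaries )
{
  primary.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eSecondaryCommandBuffers );
  primary.executeCommands( secondaries );  // the only commands allowed inside such a render pass
  primary.endRenderPass();
}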
vk::SubmitInfo submitInfo(1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, drawFence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, drawFence.get() ); - while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == device->waitForFences( drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); /* VULKAN_KEY_END */ device->waitIdle(); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/SeparateImageSampler/SeparateImageSampler.cpp b/samples/SeparateImageSampler/SeparateImageSampler.cpp index f0f246b..142b7ff 100644 --- a/samples/SeparateImageSampler/SeparateImageSampler.cpp +++ b/samples/SeparateImageSampler/SeparateImageSampler.cpp @@ -19,13 +19,14 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "SeparateImageSampler"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "SeparateImageSampler"; +static char const * EngineName = "Vulkan.hpp"; const std::string fragmentShaderTextTS_T_C = R"( #version 400 @@ -56,164 +57,225 @@ void main() } )"; - -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + 
vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(surfaceData.extent)); + vk::su::BufferData uniformBufferData( + physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( + device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( surfaceData.extent ) ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format, - vk::AttachmentLoadOp::eClear); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + depthBufferData.format, + vk::AttachmentLoadOp::eClear ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderTextTS_T_C); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderTextTS_T_C ); glslang::FinalizeProcess(); - 
std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent ); - vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(texturedCubeData), vk::BufferUsageFlagBits::eVertexBuffer); - vk::su::copyToDevice(device, vertexBufferData.deviceMemory, texturedCubeData, sizeof(texturedCubeData) / sizeof(texturedCubeData[0])); + vk::su::BufferData vertexBufferData( + physicalDevice, device, sizeof( texturedCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ); + vk::su::copyToDevice( device, + vertexBufferData.deviceMemory, + texturedCubeData, + sizeof( texturedCubeData ) / sizeof( texturedCubeData[0] ) ); /* VULKAN_KEY_START */ - commandBuffer->begin(vk::CommandBufferBeginInfo()); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); // Create the separate image - vk::su::TextureData textureData(physicalDevice, device); - textureData.setImage(device, commandBuffer, vk::su::MonochromeImageGenerator({ 118, 185, 0 })); + vk::su::TextureData textureData( physicalDevice, device ); + textureData.setImage( device, commandBuffer, vk::su::MonochromeImageGenerator( { 118, 185, 0 } ) ); // Create the separate sampler - vk::UniqueSampler sampler = device->createSamplerUnique(vk::SamplerCreateInfo(vk::SamplerCreateFlags(), vk::Filter::eNearest, vk::Filter::eNearest, vk::SamplerMipmapMode::eNearest, - vk::SamplerAddressMode::eClampToEdge, vk::SamplerAddressMode::eClampToEdge, vk::SamplerAddressMode::eClampToEdge, - 0.0f, false, 1.0f, false, vk::CompareOp::eNever, 0.0f, 0.0f, vk::BorderColor::eFloatOpaqueWhite)); + vk::UniqueSampler sampler = + device->createSamplerUnique( vk::SamplerCreateInfo( vk::SamplerCreateFlags(), + vk::Filter::eNearest, + vk::Filter::eNearest, + vk::SamplerMipmapMode::eNearest, + vk::SamplerAddressMode::eClampToEdge, + vk::SamplerAddressMode::eClampToEdge, + vk::SamplerAddressMode::eClampToEdge, + 0.0f, + false, + 1.0f, + false, + vk::CompareOp::eNever, + 0.0f, + 0.0f, + vk::BorderColor::eFloatOpaqueWhite ) ); // Create binding and layout for the following, matching contents of shader // binding 0 = uniform buffer (MVP) // binding 1 = texture2D // binding 2 = sampler - std::array resourceBindings = - { - vk::DescriptorSetLayoutBinding(0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex), - vk::DescriptorSetLayoutBinding(1, vk::DescriptorType::eSampledImage, 1, vk::ShaderStageFlagBits::eFragment), - vk::DescriptorSetLayoutBinding(2, vk::DescriptorType::eSampler, 1, vk::ShaderStageFlagBits::eFragment) + std::array resourceBindings = { + vk::DescriptorSetLayoutBinding( 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex ), + vk::DescriptorSetLayoutBinding( 1, vk::DescriptorType::eSampledImage, 1, vk::ShaderStageFlagBits::eFragment ), + vk::DescriptorSetLayoutBinding( 2, vk::DescriptorType::eSampler, 1, vk::ShaderStageFlagBits::eFragment ) }; - vk::UniqueDescriptorSetLayout descriptorSetLayout = device->createDescriptorSetLayoutUnique(vk::DescriptorSetLayoutCreateInfo(vk::DescriptorSetLayoutCreateFlags(), - static_cast(resourceBindings.size()), - resourceBindings.data())); + vk::UniqueDescriptorSetLayout descriptorSetLayout = device->createDescriptorSetLayoutUnique( + vk::DescriptorSetLayoutCreateInfo( vk::DescriptorSetLayoutCreateFlags(), + static_cast( resourceBindings.size() ), + 
resourceBindings.data() ) ); // Create pipeline layout - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &(*descriptorSetLayout))); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &( *descriptorSetLayout ) ) ); // Create a single pool to contain data for the descriptor set - std::array poolSizes = - { - vk::DescriptorPoolSize(vk::DescriptorType::eUniformBuffer, 1), - vk::DescriptorPoolSize(vk::DescriptorType::eSampledImage, 1), - vk::DescriptorPoolSize(vk::DescriptorType::eSampler, 1) - }; - vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique(vk::DescriptorPoolCreateInfo(vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, - static_cast(poolSizes.size()), poolSizes.data())); + std::array poolSizes = { vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 1 ), + vk::DescriptorPoolSize( vk::DescriptorType::eSampledImage, 1 ), + vk::DescriptorPoolSize( vk::DescriptorType::eSampler, 1 ) }; + vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique( + vk::DescriptorPoolCreateInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, + 1, + static_cast( poolSizes.size() ), + poolSizes.data() ) ); // Populate descriptor sets - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front()); + vk::UniqueDescriptorSet descriptorSet = std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); - vk::DescriptorBufferInfo bufferInfo(uniformBufferData.buffer.get(), 0, sizeof(glm::mat4x4)); - vk::DescriptorImageInfo imageInfo(textureData.textureSampler.get(), textureData.imageData->imageView.get(), vk::ImageLayout::eShaderReadOnlyOptimal); - vk::DescriptorImageInfo samplerInfo(sampler.get(), {}, {}); - std::array descriptorWrites = - { - vk::WriteDescriptorSet(*descriptorSet, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo), - vk::WriteDescriptorSet(*descriptorSet, 1, 0, 1, vk::DescriptorType::eSampledImage, &imageInfo), - vk::WriteDescriptorSet(*descriptorSet, 2, 0, 1, vk::DescriptorType::eSampler, &samplerInfo) + vk::DescriptorBufferInfo bufferInfo( uniformBufferData.buffer.get(), 0, sizeof( glm::mat4x4 ) ); + vk::DescriptorImageInfo imageInfo( textureData.textureSampler.get(), + textureData.imageData->imageView.get(), + vk::ImageLayout::eShaderReadOnlyOptimal ); + vk::DescriptorImageInfo samplerInfo( sampler.get(), {}, {} ); + std::array descriptorWrites = { + vk::WriteDescriptorSet( *descriptorSet, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo ), + vk::WriteDescriptorSet( *descriptorSet, 1, 0, 1, vk::DescriptorType::eSampledImage, &imageInfo ), + vk::WriteDescriptorSet( *descriptorSet, 2, 0, 1, vk::DescriptorType::eSampler, &samplerInfo ) }; - device->updateDescriptorSets(descriptorWrites, nullptr); + device->updateDescriptorSets( descriptorWrites, nullptr ); /* VULKAN_KEY_END */ - vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() ); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), 
std::make_pair(*fragmentShaderModule, nullptr), - sizeof(texturedCubeData[0]), { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, - vk::FrontFace::eClockwise, true, pipelineLayout, renderPass); + vk::UniquePipeline graphicsPipeline = + vk::su::createGraphicsPipeline( device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + sizeof( texturedCubeData[0] ), + { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, + vk::FrontFace::eClockwise, + true, + pipelineLayout, + renderPass ); // Get the index of the next available swapchain image: - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); + vk::ResultValue currentBuffer = device->acquireNextImageKHR( + swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); vk::ClearValue clearValues[2]; - clearValues[0].color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); + clearValues[0].color = vk::ClearColorValue( std::array( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); - vk::RenderPassBeginInfo renderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 2, clearValues); - commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eInline); + vk::RenderPassBeginInfo renderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), + 2, + clearValues ); + commandBuffer->beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline ); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get()); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr ); - commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0}); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast(surfaceData.extent.width), static_cast(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent)); + commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } ); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast( surfaceData.extent.width ), + static_cast( surfaceData.extent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); - commandBuffer->draw(12 * 3, 1, 0, 0); + commandBuffer->draw( 12 * 3, 1, 0, 0 ); commandBuffer->endRenderPass(); commandBuffer->end(); - vk::UniqueFence drawFence = 
device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence drawFence = device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - vk::SubmitInfo submitInfo(1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, drawFence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, drawFence.get() ); - while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == device->waitForFences( drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), ¤tBuffer.value ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); device->waitIdle(); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/SurfaceCapabilities/SurfaceCapabilities.cpp b/samples/SurfaceCapabilities/SurfaceCapabilities.cpp index 4a058d8..8483b24 100644 --- a/samples/SurfaceCapabilities/SurfaceCapabilities.cpp +++ b/samples/SurfaceCapabilities/SurfaceCapabilities.cpp @@ -17,132 +17,171 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include #include #include -static char const* AppName = "SurfaceCapabilities"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "SurfaceCapabilities"; +static char const * EngineName = "Vulkan.hpp"; -void cout(vk::SurfaceCapabilitiesKHR const& surfaceCapabilities) +void cout( vk::SurfaceCapabilitiesKHR const & surfaceCapabilities ) { std::cout << "\tCapabilities:\n"; - std::cout << "\t\t" << "currentExtent = " << surfaceCapabilities.currentExtent.width << " x " << surfaceCapabilities.currentExtent.height << "\n"; - std::cout << "\t\t" << "currentTransform = " << vk::to_string(surfaceCapabilities.currentTransform) << "\n"; - std::cout << "\t\t" << "maxImageArrayLayers = " << surfaceCapabilities.maxImageArrayLayers << "\n"; - std::cout << "\t\t" << "maxImageCount = " << surfaceCapabilities.maxImageCount << "\n"; - std::cout << "\t\t" << "maxImageExtent = " << surfaceCapabilities.maxImageExtent.width << " x " << surfaceCapabilities.maxImageExtent.height << "\n"; - std::cout << "\t\t" << "minImageCount = " << surfaceCapabilities.minImageCount << "\n"; - std::cout << "\t\t" << "minImageExtent = " << surfaceCapabilities.minImageExtent.width << " x " << surfaceCapabilities.minImageExtent.height << "\n"; - std::cout << "\t\t" << "supportedCompositeAlpha = " << vk::to_string(surfaceCapabilities.supportedCompositeAlpha) << "\n"; - std::cout << "\t\t" << "supportedTransforms = " << vk::to_string(surfaceCapabilities.supportedTransforms) << "\n"; - std::cout << "\t\t" 
<< "supportedUsageFlags = " << vk::to_string(surfaceCapabilities.supportedUsageFlags) << "\n"; + std::cout << "\t\t" + << "currentExtent = " << surfaceCapabilities.currentExtent.width << " x " + << surfaceCapabilities.currentExtent.height << "\n"; + std::cout << "\t\t" + << "currentTransform = " << vk::to_string( surfaceCapabilities.currentTransform ) << "\n"; + std::cout << "\t\t" + << "maxImageArrayLayers = " << surfaceCapabilities.maxImageArrayLayers << "\n"; + std::cout << "\t\t" + << "maxImageCount = " << surfaceCapabilities.maxImageCount << "\n"; + std::cout << "\t\t" + << "maxImageExtent = " << surfaceCapabilities.maxImageExtent.width << " x " + << surfaceCapabilities.maxImageExtent.height << "\n"; + std::cout << "\t\t" + << "minImageCount = " << surfaceCapabilities.minImageCount << "\n"; + std::cout << "\t\t" + << "minImageExtent = " << surfaceCapabilities.minImageExtent.width << " x " + << surfaceCapabilities.minImageExtent.height << "\n"; + std::cout << "\t\t" + << "supportedCompositeAlpha = " << vk::to_string( surfaceCapabilities.supportedCompositeAlpha ) << "\n"; + std::cout << "\t\t" + << "supportedTransforms = " << vk::to_string( surfaceCapabilities.supportedTransforms ) << "\n"; + std::cout << "\t\t" + << "supportedUsageFlags = " << vk::to_string( surfaceCapabilities.supportedUsageFlags ) << "\n"; std::cout << "\n"; } -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { // need to initialize the dynamic dispatcher before the very first vulkan call -#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1) - static vk::DynamicLoader dl; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = dl.getProcAddress("vkGetInstanceProcAddr"); - VULKAN_HPP_DEFAULT_DISPATCHER.init(vkGetInstanceProcAddr); +#if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + static vk::DynamicLoader dl; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = + dl.getProcAddress( "vkGetInstanceProcAddr" ); + VULKAN_HPP_DEFAULT_DISPATCHER.init( vkGetInstanceProcAddr ); #endif std::vector instanceExtensionProperties = vk::enumerateInstanceExtensionProperties(); - bool supportsGetSurfaceCapabilities2 = (std::find_if(instanceExtensionProperties.begin(), instanceExtensionProperties.end(), [](vk::ExtensionProperties const& ep) { return strcmp(ep.extensionName, VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME) == 0; }) != instanceExtensionProperties.end()); + bool supportsGetSurfaceCapabilities2 = + ( std::find_if( instanceExtensionProperties.begin(), + instanceExtensionProperties.end(), + []( vk::ExtensionProperties const & ep ) { + return strcmp( ep.extensionName, VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME ) == 0; + } ) != instanceExtensionProperties.end() ); std::vector extensions = vk::su::getInstanceExtensions(); - if (supportsGetSurfaceCapabilities2) + if ( supportsGetSurfaceCapabilities2 ) { - extensions.push_back(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME); + extensions.push_back( VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME ); } - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, extensions); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, extensions ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif // enumerate the physicalDevices std::vector physicalDevices = instance->enumeratePhysicalDevices(); - 
vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); /* VULKAN_KEY_START */ std::cout << std::boolalpha; - for (size_t i=0 ; i extensionProperties = physicalDevices[i].enumerateDeviceExtensionProperties(); + std::vector extensionProperties = + physicalDevices[i].enumerateDeviceExtensionProperties(); std::cout << "PhysicalDevice " << i << "\n"; - if (supportsGetSurfaceCapabilities2) + if ( supportsGetSurfaceCapabilities2 ) { - auto surfaceCapabilities2 = physicalDevices[i].getSurfaceCapabilities2KHR(*surfaceData.surface); + auto surfaceCapabilities2 = + physicalDevices[i] + .getSurfaceCapabilities2KHR( *surfaceData.surface ); - vk::SurfaceCapabilitiesKHR const& surfaceCapabilities = surfaceCapabilities2.get().surfaceCapabilities; - cout(surfaceCapabilities); + vk::SurfaceCapabilitiesKHR const & surfaceCapabilities = + surfaceCapabilities2.get().surfaceCapabilities; + cout( surfaceCapabilities ); - if (vk::su::contains(extensionProperties, "VK_AMD_display_native_hdr")) + if ( vk::su::contains( extensionProperties, "VK_AMD_display_native_hdr" ) ) { - vk::DisplayNativeHdrSurfaceCapabilitiesAMD displayNativeHdrSurfaceCapabilities = surfaceCapabilities2.get(); + vk::DisplayNativeHdrSurfaceCapabilitiesAMD displayNativeHdrSurfaceCapabilities = + surfaceCapabilities2.get(); std::cout << "\tDisplayNativeHdrSurfaceCapabilitiesAMD:\n"; - std::cout << "\t\t" << "localDimmingSupport = " << static_cast(displayNativeHdrSurfaceCapabilities.localDimmingSupport) << "\n"; + std::cout << "\t\t" + << "localDimmingSupport = " + << static_cast( displayNativeHdrSurfaceCapabilities.localDimmingSupport ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_shared_presentable_image")) + if ( vk::su::contains( extensionProperties, "VK_KHR_shared_presentable_image" ) ) { - vk::SharedPresentSurfaceCapabilitiesKHR sharedPresentSurfaceCapabilities = surfaceCapabilities2.get(); + vk::SharedPresentSurfaceCapabilitiesKHR sharedPresentSurfaceCapabilities = + surfaceCapabilities2.get(); std::cout << "\tSharedPresentSurfaceCapabilitiesKHR:\n"; - std::cout << "\t\t" << "sharedPresentSupportedUsageFlags = " << vk::to_string(sharedPresentSurfaceCapabilities.sharedPresentSupportedUsageFlags) << "\n"; + std::cout << "\t\t" + << "sharedPresentSupportedUsageFlags = " + << vk::to_string( sharedPresentSurfaceCapabilities.sharedPresentSupportedUsageFlags ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_EXT_full_screen_exclusive")) + if ( vk::su::contains( extensionProperties, "VK_EXT_full_screen_exclusive" ) ) { - vk::SurfaceCapabilitiesFullScreenExclusiveEXT surfaceCapabilitiesFullScreenExclusive = surfaceCapabilities2.get(); + vk::SurfaceCapabilitiesFullScreenExclusiveEXT surfaceCapabilitiesFullScreenExclusive = + surfaceCapabilities2.get(); std::cout << "\tSurfaceCapabilitiesFullScreenExclusiveEXT:\n"; - std::cout << "\t\t" << "fullScreenExclusiveSupported = " << static_cast(surfaceCapabilitiesFullScreenExclusive.fullScreenExclusiveSupported) << "\n"; + std::cout << "\t\t" + << "fullScreenExclusiveSupported = " + << static_cast( surfaceCapabilitiesFullScreenExclusive.fullScreenExclusiveSupported ) << "\n"; std::cout << "\n"; } - if (vk::su::contains(extensionProperties, "VK_KHR_surface_protected_capabilities")) + if ( vk::su::contains( extensionProperties, "VK_KHR_surface_protected_capabilities" ) ) { - vk::SurfaceProtectedCapabilitiesKHR surfaceProtectedCapabilities = 
surfaceCapabilities2.get(); + vk::SurfaceProtectedCapabilitiesKHR surfaceProtectedCapabilities = + surfaceCapabilities2.get(); std::cout << "\tSurfaceProtectedCapabilitiesKHR:\n"; - std::cout << "\t\t" << "supportsProtected = " << static_cast(surfaceProtectedCapabilities.supportsProtected) << "\n"; + std::cout << "\t\t" + << "supportsProtected = " << static_cast( surfaceProtectedCapabilities.supportsProtected ) + << "\n"; std::cout << "\n"; } } else { - vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevices[i].getSurfaceCapabilitiesKHR(*surfaceData.surface); - cout(surfaceCapabilities); + vk::SurfaceCapabilitiesKHR surfaceCapabilities = + physicalDevices[i].getSurfaceCapabilitiesKHR( *surfaceData.surface ); + cout( surfaceCapabilities ); } } /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/SurfaceFormats/SurfaceFormats.cpp b/samples/SurfaceFormats/SurfaceFormats.cpp index a579e65..cde6cc2 100644 --- a/samples/SurfaceFormats/SurfaceFormats.cpp +++ b/samples/SurfaceFormats/SurfaceFormats.cpp @@ -17,59 +17,63 @@ #include "../utils/utils.hpp" #include "vulkan/vulkan.hpp" + #include #include #include -static char const* AppName = "SurfaceFormats"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "SurfaceFormats"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif // enumerate the physicalDevices std::vector physicalDevices = instance->enumeratePhysicalDevices(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); /* VULKAN_KEY_START */ std::cout << std::boolalpha; - for (size_t i=0 ; i surfaceFormats = physicalDevices[i].getSurfaceFormatsKHR(*surfaceData.surface); - for (size_t j = 0; j < surfaceFormats.size(); j++) + std::vector surfaceFormats = + physicalDevices[i].getSurfaceFormatsKHR( *surfaceData.surface ); + for ( size_t j = 0; j < surfaceFormats.size(); j++ ) { std::cout << "\tFormat " << j << "\n"; - std::cout << "\t\t" << "colorSpace = " << vk::to_string(surfaceFormats[j].colorSpace) << "\n"; - std::cout << "\t\t" << "format = " << vk::to_string(surfaceFormats[j].format) << "\n"; + std::cout << "\t\t" + << "colorSpace = " << vk::to_string( surfaceFormats[j].colorSpace ) << "\n"; + std::cout << "\t\t" + << "format = " << vk::to_string( surfaceFormats[j].format ) << "\n"; std::cout << "\n"; } } /* VULKAN_KEY_END */ } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( 
-1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/Template/Template.cpp b/samples/Template/Template.cpp index 28d28f7..e168664 100644 --- a/samples/Template/Template.cpp +++ b/samples/Template/Template.cpp @@ -19,125 +19,181 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "Template"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "Template"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData 
swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::DepthBufferData depthBufferData(physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent); + vk::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent ); - vk::su::TextureData textureData(physicalDevice, device); + vk::su::TextureData textureData( physicalDevice, device ); - commandBuffer->begin(vk::CommandBufferBeginInfo()); - textureData.setImage(device, commandBuffer, vk::su::CheckerboardImageGenerator()); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); + textureData.setImage( device, commandBuffer, vk::su::CheckerboardImageGenerator() ); - vk::su::BufferData uniformBufferData(physicalDevice, device, sizeof(glm::mat4x4), vk::BufferUsageFlagBits::eUniformBuffer); - vk::su::copyToDevice(device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix(surfaceData.extent)); + vk::su::BufferData uniformBufferData( + physicalDevice, device, sizeof( glm::mat4x4 ), vk::BufferUsageFlagBits::eUniformBuffer ); + vk::su::copyToDevice( + device, uniformBufferData.deviceMemory, vk::su::createModelViewProjectionClipMatrix( surfaceData.extent ) ); - vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, - { {vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex}, {vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment} }); - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout( + device, + { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex }, + { vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment } } ); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, depthBufferData.format); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + depthBufferData.format ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C); + vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PT_T ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_T_C ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent); + std::vector framebuffers = 
vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, depthBufferData.imageView, surfaceData.extent ); - vk::su::BufferData vertexBufferData(physicalDevice, device, sizeof(texturedCubeData), vk::BufferUsageFlagBits::eVertexBuffer); - vk::su::copyToDevice(device, vertexBufferData.deviceMemory, texturedCubeData, sizeof(texturedCubeData) / sizeof(texturedCubeData[0])); + vk::su::BufferData vertexBufferData( + physicalDevice, device, sizeof( texturedCubeData ), vk::BufferUsageFlagBits::eVertexBuffer ); + vk::su::copyToDevice( device, + vertexBufferData.deviceMemory, + texturedCubeData, + sizeof( texturedCubeData ) / sizeof( texturedCubeData[0] ) ); - vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool(device, { {vk::DescriptorType::eUniformBuffer, 1}, {vk::DescriptorType::eCombinedImageSampler, 1} }); - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front()); - vk::su::updateDescriptorSets(device, descriptorSet, {{vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView()}}, textureData); + vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool( + device, { { vk::DescriptorType::eUniformBuffer, 1 }, { vk::DescriptorType::eCombinedImageSampler, 1 } } ); + vk::UniqueDescriptorSet descriptorSet = std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); + vk::su::updateDescriptorSets( + device, + descriptorSet, + { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, vk::UniqueBufferView() } }, + textureData ); - vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr), - sizeof(texturedCubeData[0]), {{ vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 }}, - vk::FrontFace::eClockwise, true, pipelineLayout, renderPass); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() ); + vk::UniquePipeline graphicsPipeline = + vk::su::createGraphicsPipeline( device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + sizeof( texturedCubeData[0] ), + { { vk::Format::eR32G32B32A32Sfloat, 0 }, { vk::Format::eR32G32Sfloat, 16 } }, + vk::FrontFace::eClockwise, + true, + pipelineLayout, + renderPass ); // Get the index of the next available swapchain image: - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); + vk::ResultValue currentBuffer = device->acquireNextImageKHR( + swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); vk::ClearValue clearValues[2]; - clearValues[0].color = 
vk::ClearColorValue(std::array<float, 4>({ 0.2f, 0.2f, 0.2f, 0.2f })); - clearValues[1].depthStencil = vk::ClearDepthStencilValue(1.0f, 0); - vk::RenderPassBeginInfo renderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 2, clearValues); + clearValues[0].color = vk::ClearColorValue( std::array<float, 4>( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 ); + vk::RenderPassBeginInfo renderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), + 2, + clearValues ); - commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eInline); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get()); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr); + commandBuffer->beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline ); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr ); - commandBuffer->bindVertexBuffers(0, *vertexBufferData.buffer, {0}); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast<float>(surfaceData.extent.width), static_cast<float>(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent)); + commandBuffer->bindVertexBuffers( 0, *vertexBufferData.buffer, { 0 } ); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast<float>( surfaceData.extent.width ), + static_cast<float>( surfaceData.extent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); - commandBuffer->draw(12 * 3, 1, 0, 0); + commandBuffer->draw( 12 * 3, 1, 0, 0 ); commandBuffer->endRenderPass(); commandBuffer->end(); - vk::UniqueFence drawFence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence drawFence = device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - vk::SubmitInfo submitInfo(1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, drawFence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, drawFence.get() ); - while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == device->waitForFences( drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), &currentBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), &currentBuffer.value ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); device->waitIdle(); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( 
std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/TexelBuffer/TexelBuffer.cpp b/samples/TexelBuffer/TexelBuffer.cpp index 01b8ec7..a356531 100644 --- a/samples/TexelBuffer/TexelBuffer.cpp +++ b/samples/TexelBuffer/TexelBuffer.cpp @@ -19,13 +19,14 @@ #include "../utils/math.hpp" #include "../utils/shaders.hpp" #include "../utils/utils.hpp" -#include "vulkan/vulkan.hpp" #include "SPIRV/GlslangToSpv.h" +#include "vulkan/vulkan.hpp" + #include #include -static char const* AppName = "TexelBuffer"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "TexelBuffer"; +static char const * EngineName = "Vulkan.hpp"; static const std::string vertexShaderText = R"( #version 400 @@ -52,128 +53,179 @@ void main() } )"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { - const float texels[] = {118.0f / 255.0f, 185.0f / 255.0f, 0.0f}; + const float texels[] = { 118.0f / 255.0f, 185.0f / 255.0f, 0.0f }; try { - vk::UniqueInstance instance = vk::su::createInstance(AppName, EngineName, {}, vk::su::getInstanceExtensions()); -#if !defined(NDEBUG) - vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger(instance); + vk::UniqueInstance instance = vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() ); +#if !defined( NDEBUG ) + vk::UniqueDebugUtilsMessengerEXT debugUtilsMessenger = vk::su::createDebugUtilsMessenger( instance ); #endif vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); vk::PhysicalDeviceProperties physicalDeviceProperties = physicalDevice.getProperties(); - if (physicalDeviceProperties.limits.maxTexelBufferElements < 4) + if ( physicalDeviceProperties.limits.maxTexelBufferElements < 4 ) { std::cout << "maxTexelBufferElements too small\n"; - exit(-1); + exit( -1 ); } - vk::Format texelFormat = vk::Format::eR32Sfloat; - vk::FormatProperties formatProperties = physicalDevice.getFormatProperties(texelFormat); - if (!(formatProperties.bufferFeatures & vk::FormatFeatureFlagBits::eUniformTexelBuffer)) + vk::Format texelFormat = vk::Format::eR32Sfloat; + vk::FormatProperties formatProperties = physicalDevice.getFormatProperties( texelFormat ); + if ( !( formatProperties.bufferFeatures & vk::FormatFeatureFlagBits::eUniformTexelBuffer ) ) { std::cout << "R32_SFLOAT format unsupported for texel buffer\n"; - exit(-1); + exit( -1 ); } - vk::su::SurfaceData surfaceData(instance, AppName, vk::Extent2D(500, 500)); + vk::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 500, 500 ) ); - std::pair graphicsAndPresentQueueFamilyIndex = vk::su::findGraphicsAndPresentQueueFamilyIndex(physicalDevice, *surfaceData.surface); - vk::UniqueDevice device = vk::su::createDevice(physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions()); + std::pair graphicsAndPresentQueueFamilyIndex = + vk::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, *surfaceData.surface ); + vk::UniqueDevice device = + vk::su::createDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() ); - vk::UniqueCommandPool commandPool = vk::su::createCommandPool(device, graphicsAndPresentQueueFamilyIndex.first); - vk::UniqueCommandBuffer commandBuffer = 
std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).front()); + vk::UniqueCommandPool commandPool = vk::su::createCommandPool( device, graphicsAndPresentQueueFamilyIndex.first ); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); - vk::Queue graphicsQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.first, 0); - vk::Queue presentQueue = device->getQueue(graphicsAndPresentQueueFamilyIndex.second, 0); + vk::Queue graphicsQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ); + vk::Queue presentQueue = device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ); - vk::su::SwapChainData swapChainData(physicalDevice, device, *surfaceData.surface, surfaceData.extent, vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc, - vk::UniqueSwapchainKHR(), graphicsAndPresentQueueFamilyIndex.first, graphicsAndPresentQueueFamilyIndex.second); + vk::su::SwapChainData swapChainData( physicalDevice, + device, + *surfaceData.surface, + surfaceData.extent, + vk::ImageUsageFlagBits::eColorAttachment | + vk::ImageUsageFlagBits::eTransferSrc, + vk::UniqueSwapchainKHR(), + graphicsAndPresentQueueFamilyIndex.first, + graphicsAndPresentQueueFamilyIndex.second ); - vk::su::BufferData texelBufferData(physicalDevice, device, sizeof(texels), vk::BufferUsageFlagBits::eUniformTexelBuffer); - texelBufferData.upload(device, texels); + vk::su::BufferData texelBufferData( + physicalDevice, device, sizeof( texels ), vk::BufferUsageFlagBits::eUniformTexelBuffer ); + texelBufferData.upload( device, texels ); - vk::UniqueBufferView texelBufferView = device->createBufferViewUnique(vk::BufferViewCreateInfo({}, *texelBufferData.buffer, texelFormat, 0, sizeof(texels))); + vk::UniqueBufferView texelBufferView = device->createBufferViewUnique( + vk::BufferViewCreateInfo( {}, *texelBufferData.buffer, texelFormat, 0, sizeof( texels ) ) ); - vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout(device, { {vk::DescriptorType::eUniformTexelBuffer, 1, vk::ShaderStageFlagBits::eVertex} }); - vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo(vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get())); + vk::UniqueDescriptorSetLayout descriptorSetLayout = vk::su::createDescriptorSetLayout( + device, { { vk::DescriptorType::eUniformTexelBuffer, 1, vk::ShaderStageFlagBits::eVertex } } ); + vk::UniquePipelineLayout pipelineLayout = device->createPipelineLayoutUnique( + vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout.get() ) ); - vk::UniqueRenderPass renderPass = vk::su::createRenderPass(device, vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surfaceData.surface.get())).format, vk::Format::eUndefined); + vk::UniqueRenderPass renderPass = vk::su::createRenderPass( + device, + vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface.get() ) ).format, + vk::Format::eUndefined ); glslang::InitializeProcess(); - vk::UniqueShaderModule vertexShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eVertex, vertexShaderText); - vk::UniqueShaderModule fragmentShaderModule = vk::su::createShaderModule(device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C); + 
vk::UniqueShaderModule vertexShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText ); + vk::UniqueShaderModule fragmentShaderModule = + vk::su::createShaderModule( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C ); glslang::FinalizeProcess(); - std::vector framebuffers = vk::su::createFramebuffers(device, renderPass, swapChainData.imageViews, vk::UniqueImageView(), surfaceData.extent); + std::vector framebuffers = vk::su::createFramebuffers( + device, renderPass, swapChainData.imageViews, vk::UniqueImageView(), surfaceData.extent ); - vk::UniqueDescriptorPool descriptorPool = vk::su::createDescriptorPool(device, { {vk::DescriptorType::eUniformTexelBuffer, 1} }); - vk::UniqueDescriptorSet descriptorSet = std::move(device->allocateDescriptorSetsUnique(vk::DescriptorSetAllocateInfo(*descriptorPool, 1, &*descriptorSetLayout)).front()); - vk::su::updateDescriptorSets(device, descriptorSet, {{vk::DescriptorType::eUniformTexelBuffer, texelBufferData.buffer, texelBufferView}}, {}); + vk::UniqueDescriptorPool descriptorPool = + vk::su::createDescriptorPool( device, { { vk::DescriptorType::eUniformTexelBuffer, 1 } } ); + vk::UniqueDescriptorSet descriptorSet = std::move( + device->allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo( *descriptorPool, 1, &*descriptorSetLayout ) ) + .front() ); + vk::su::updateDescriptorSets( + device, + descriptorSet, + { { vk::DescriptorType::eUniformTexelBuffer, texelBufferData.buffer, texelBufferView } }, + {} ); - vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()); - vk::UniquePipeline graphicsPipeline = vk::su::createGraphicsPipeline(device, pipelineCache, std::make_pair(*vertexShaderModule, nullptr), std::make_pair(*fragmentShaderModule, nullptr), - 0, {}, vk::FrontFace::eClockwise, false, pipelineLayout, renderPass); + vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() ); + vk::UniquePipeline graphicsPipeline = + vk::su::createGraphicsPipeline( device, + pipelineCache, + std::make_pair( *vertexShaderModule, nullptr ), + std::make_pair( *fragmentShaderModule, nullptr ), + 0, + {}, + vk::FrontFace::eClockwise, + false, + pipelineLayout, + renderPass ); /* VULKAN_KEY_START */ // Get the index of the next available swapchain image: - vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique(vk::SemaphoreCreateInfo()); - vk::ResultValue currentBuffer = device->acquireNextImageKHR(swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr); - assert(currentBuffer.result == vk::Result::eSuccess); - assert(currentBuffer.value < framebuffers.size()); + vk::UniqueSemaphore imageAcquiredSemaphore = device->createSemaphoreUnique( vk::SemaphoreCreateInfo() ); + vk::ResultValue currentBuffer = device->acquireNextImageKHR( + swapChainData.swapChain.get(), vk::su::FenceTimeout, imageAcquiredSemaphore.get(), nullptr ); + assert( currentBuffer.result == vk::Result::eSuccess ); + assert( currentBuffer.value < framebuffers.size() ); - commandBuffer->begin(vk::CommandBufferBeginInfo()); + commandBuffer->begin( vk::CommandBufferBeginInfo() ); vk::ClearValue clearValue; - clearValue.color = vk::ClearColorValue(std::array({ 0.2f, 0.2f, 0.2f, 0.2f })); - vk::RenderPassBeginInfo renderPassBeginInfo(renderPass.get(), framebuffers[currentBuffer.value].get(), vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent), 1, &clearValue); + clearValue.color = 
vk::ClearColorValue( std::array<float, 4>( { 0.2f, 0.2f, 0.2f, 0.2f } ) ); + vk::RenderPassBeginInfo renderPassBeginInfo( renderPass.get(), + framebuffers[currentBuffer.value].get(), + vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), + 1, + &clearValue ); - commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eInline); - commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, graphicsPipeline.get()); - commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr); + commandBuffer->beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline ); + commandBuffer->bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline.get() ); + commandBuffer->bindDescriptorSets( + vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0, descriptorSet.get(), nullptr ); - commandBuffer->setViewport(0, vk::Viewport(0.0f, 0.0f, static_cast<float>(surfaceData.extent.width), static_cast<float>(surfaceData.extent.height), 0.0f, 1.0f)); - commandBuffer->setScissor(0, vk::Rect2D(vk::Offset2D(0, 0), surfaceData.extent)); + commandBuffer->setViewport( 0, + vk::Viewport( 0.0f, + 0.0f, + static_cast<float>( surfaceData.extent.width ), + static_cast<float>( surfaceData.extent.height ), + 0.0f, + 1.0f ) ); + commandBuffer->setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) ); - commandBuffer->draw(3, 1, 0, 0); + commandBuffer->draw( 3, 1, 0, 0 ); commandBuffer->endRenderPass(); commandBuffer->end(); - vk::UniqueFence drawFence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence drawFence = device->createFenceUnique( vk::FenceCreateInfo() ); - vk::PipelineStageFlags waitDestinationStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput); - vk::SubmitInfo submitInfo(1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get()); - graphicsQueue.submit(submitInfo, drawFence.get()); + vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput ); + vk::SubmitInfo submitInfo( 1, &imageAcquiredSemaphore.get(), &waitDestinationStageMask, 1, &commandBuffer.get() ); + graphicsQueue.submit( submitInfo, drawFence.get() ); - while (vk::Result::eTimeout == device->waitForFences(drawFence.get(), VK_TRUE, vk::su::FenceTimeout)) + while ( vk::Result::eTimeout == device->waitForFences( drawFence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; - presentQueue.presentKHR(vk::PresentInfoKHR(0, nullptr, 1, &swapChainData.swapChain.get(), &currentBuffer.value)); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + presentQueue.presentKHR( + vk::PresentInfoKHR( 0, nullptr, 1, &swapChainData.swapChain.get(), &currentBuffer.value ) ); + std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) ); /* VULKAN_KEY_END */ device->waitIdle(); } - catch (vk::SystemError& err) + catch ( vk::SystemError & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (std::runtime_error& err) + catch ( std::runtime_error & err ) { std::cout << "std::runtime_error: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/samples/utils/math.cpp b/samples/utils/math.cpp index 613a439..e9b8011 100644 --- a/samples/utils/math.cpp +++ b/samples/utils/math.cpp @@ -19,19 +19,35 @@ namespace vk { namespace su { - glm::mat4x4 createModelViewProjectionClipMatrix(vk::Extent2D const& extent) + glm::mat4x4 createModelViewProjectionClipMatrix( vk::Extent2D const & extent ) { - float fov = glm::radians(45.0f); - if (extent.width > extent.height) + float fov = glm::radians( 45.0f ); + if ( extent.width > extent.height ) { - fov *= static_cast(extent.height) / static_cast(extent.width); + fov *= static_cast( extent.height ) / static_cast( extent.width ); } - glm::mat4x4 model = glm::mat4x4(1.0f); - glm::mat4x4 view = glm::lookAt(glm::vec3(-5.0f, 3.0f, -10.0f), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, -1.0f, 0.0f)); - glm::mat4x4 projection = glm::perspective(fov, 1.0f, 0.1f, 100.0f); - glm::mat4x4 clip = glm::mat4x4(1.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.5f, 0.0f, 0.0f, 0.0f, 0.5f, 1.0f); // vulkan clip space has inverted y and half z ! + glm::mat4x4 model = glm::mat4x4( 1.0f ); + glm::mat4x4 view = + glm::lookAt( glm::vec3( -5.0f, 3.0f, -10.0f ), glm::vec3( 0.0f, 0.0f, 0.0f ), glm::vec3( 0.0f, -1.0f, 0.0f ) ); + glm::mat4x4 projection = glm::perspective( fov, 1.0f, 0.1f, 100.0f ); + glm::mat4x4 clip = glm::mat4x4( 1.0f, + 0.0f, + 0.0f, + 0.0f, + 0.0f, + -1.0f, + 0.0f, + 0.0f, + 0.0f, + 0.0f, + 0.5f, + 0.0f, + 0.0f, + 0.0f, + 0.5f, + 1.0f ); // vulkan clip space has inverted y and half z ! return clip * projection * view * model; } - } -} + } // namespace su +} // namespace vk diff --git a/samples/utils/math.hpp b/samples/utils/math.hpp index 42a726b..ac701f8 100644 --- a/samples/utils/math.hpp +++ b/samples/utils/math.hpp @@ -15,13 +15,14 @@ #include #define GLM_FORCE_RADIANS -#pragma warning(disable:4201) // disable warning C4201: nonstandard extension used: nameless struct/union; needed to get glm/detail/type_vec?.hpp without warnings +#pragma warning( disable : 4201 ) // disable warning C4201: nonstandard extension used: nameless struct/union; needed + // to get glm/detail/type_vec?.hpp without warnings #include namespace vk { namespace su { - glm::mat4x4 createModelViewProjectionClipMatrix(vk::Extent2D const& extent); + glm::mat4x4 createModelViewProjectionClipMatrix( vk::Extent2D const & extent ); } -} +} // namespace vk diff --git a/samples/utils/shaders.cpp b/samples/utils/shaders.cpp index b4639a2..8a4acc9 100644 --- a/samples/utils/shaders.cpp +++ b/samples/utils/shaders.cpp @@ -14,85 +14,88 @@ // #include "shaders.hpp" -#include "vulkan/vulkan.hpp" -#include "StandAlone/ResourceLimits.h" + #include "SPIRV/GlslangToSpv.h" +#include "StandAlone/ResourceLimits.h" +#include "vulkan/vulkan.hpp" namespace vk { namespace su { - - EShLanguage translateShaderStage(vk::ShaderStageFlagBits stage) + EShLanguage translateShaderStage( vk::ShaderStageFlagBits stage ) { - switch (stage) + switch ( stage ) { - case vk::ShaderStageFlagBits::eVertex: return EShLangVertex; - case vk::ShaderStageFlagBits::eTessellationControl: return EShLangTessControl; - case vk::ShaderStageFlagBits::eTessellationEvaluation: return EShLangTessEvaluation; - case vk::ShaderStageFlagBits::eGeometry: return EShLangGeometry; - case vk::ShaderStageFlagBits::eFragment: return EShLangFragment; - case vk::ShaderStageFlagBits::eCompute: return EShLangCompute; - case vk::ShaderStageFlagBits::eRaygenNV: return EShLangRayGenNV; - case 
vk::ShaderStageFlagBits::eAnyHitNV: return EShLangAnyHitNV; - case vk::ShaderStageFlagBits::eClosestHitNV: return EShLangClosestHitNV; - case vk::ShaderStageFlagBits::eMissNV: return EShLangMissNV; - case vk::ShaderStageFlagBits::eIntersectionNV: return EShLangIntersectNV; - case vk::ShaderStageFlagBits::eCallableNV: return EShLangCallableNV; - case vk::ShaderStageFlagBits::eTaskNV: return EShLangTaskNV; - case vk::ShaderStageFlagBits::eMeshNV: return EShLangMeshNV; - default: - assert(false && "Unknown shader stage"); - return EShLangVertex; + case vk::ShaderStageFlagBits::eVertex: return EShLangVertex; + case vk::ShaderStageFlagBits::eTessellationControl: return EShLangTessControl; + case vk::ShaderStageFlagBits::eTessellationEvaluation: return EShLangTessEvaluation; + case vk::ShaderStageFlagBits::eGeometry: return EShLangGeometry; + case vk::ShaderStageFlagBits::eFragment: return EShLangFragment; + case vk::ShaderStageFlagBits::eCompute: return EShLangCompute; + case vk::ShaderStageFlagBits::eRaygenNV: return EShLangRayGenNV; + case vk::ShaderStageFlagBits::eAnyHitNV: return EShLangAnyHitNV; + case vk::ShaderStageFlagBits::eClosestHitNV: return EShLangClosestHitNV; + case vk::ShaderStageFlagBits::eMissNV: return EShLangMissNV; + case vk::ShaderStageFlagBits::eIntersectionNV: return EShLangIntersectNV; + case vk::ShaderStageFlagBits::eCallableNV: return EShLangCallableNV; + case vk::ShaderStageFlagBits::eTaskNV: return EShLangTaskNV; + case vk::ShaderStageFlagBits::eMeshNV: return EShLangMeshNV; + default: assert( false && "Unknown shader stage" ); return EShLangVertex; } } - bool GLSLtoSPV(const vk::ShaderStageFlagBits shaderType, std::string const& glslShader, std::vector &spvShader) + bool GLSLtoSPV( const vk::ShaderStageFlagBits shaderType, + std::string const & glslShader, + std::vector & spvShader ) { - EShLanguage stage = translateShaderStage(shaderType); + EShLanguage stage = translateShaderStage( shaderType ); - const char *shaderStrings[1]; + const char * shaderStrings[1]; shaderStrings[0] = glslShader.data(); - glslang::TShader shader(stage); - shader.setStrings(shaderStrings, 1); + glslang::TShader shader( stage ); + shader.setStrings( shaderStrings, 1 ); // Enable SPIR-V and Vulkan rules when parsing GLSL - EShMessages messages = (EShMessages)(EShMsgSpvRules | EShMsgVulkanRules); + EShMessages messages = ( EShMessages )( EShMsgSpvRules | EShMsgVulkanRules ); - if (!shader.parse(&glslang::DefaultTBuiltInResource, 100, false, messages)) + if ( !shader.parse( &glslang::DefaultTBuiltInResource, 100, false, messages ) ) { - puts(shader.getInfoLog()); - puts(shader.getInfoDebugLog()); + puts( shader.getInfoLog() ); + puts( shader.getInfoDebugLog() ); return false; // something didn't work } glslang::TProgram program; - program.addShader(&shader); + program.addShader( &shader ); // // Program-level processing... 
// - if (!program.link(messages)) + if ( !program.link( messages ) ) { - puts(shader.getInfoLog()); - puts(shader.getInfoDebugLog()); - fflush(stdout); + puts( shader.getInfoLog() ); + puts( shader.getInfoDebugLog() ); + fflush( stdout ); return false; } - glslang::GlslangToSpv(*program.getIntermediate(stage), spvShader); + glslang::GlslangToSpv( *program.getIntermediate( stage ), spvShader ); return true; } - vk::UniqueShaderModule createShaderModule(vk::UniqueDevice &device, vk::ShaderStageFlagBits shaderStage, std::string const& shaderText) + vk::UniqueShaderModule createShaderModule( vk::UniqueDevice & device, + vk::ShaderStageFlagBits shaderStage, + std::string const & shaderText ) { std::vector shaderSPV; - bool ok = GLSLtoSPV(shaderStage, shaderText, shaderSPV); - assert(ok); + bool ok = GLSLtoSPV( shaderStage, shaderText, shaderSPV ); + assert( ok ); - return device->createShaderModuleUnique(vk::ShaderModuleCreateInfo(vk::ShaderModuleCreateFlags(), shaderSPV.size() * sizeof(unsigned int), shaderSPV.data())); + return device->createShaderModuleUnique( vk::ShaderModuleCreateInfo( + vk::ShaderModuleCreateFlags(), shaderSPV.size() * sizeof( unsigned int ), shaderSPV.data() ) ); } - } -} + } // namespace su +} // namespace vk diff --git a/samples/utils/shaders.hpp b/samples/utils/shaders.hpp index d116c79..5c9f2cc 100644 --- a/samples/utils/shaders.hpp +++ b/samples/utils/shaders.hpp @@ -14,6 +14,7 @@ // #include "vulkan/vulkan.hpp" + #include #include @@ -21,12 +22,15 @@ namespace vk { namespace su { - vk::UniqueShaderModule createShaderModule(vk::UniqueDevice &device, vk::ShaderStageFlagBits shaderStage, std::string const& shaderText); - - bool GLSLtoSPV(const vk::ShaderStageFlagBits shaderType, std::string const& glslShader, std::vector &spvShader); - } -} + vk::UniqueShaderModule createShaderModule( vk::UniqueDevice & device, + vk::ShaderStageFlagBits shaderStage, + std::string const & shaderText ); + bool GLSLtoSPV( const vk::ShaderStageFlagBits shaderType, + std::string const & glslShader, + std::vector & spvShader ); + } // namespace su +} // namespace vk // vertex shader with (P)osition and (C)olor in and (C)olor out const std::string vertexShaderText_PC_C = R"( @@ -76,7 +80,6 @@ void main() } )"; - // fragment shader with (C)olor in and (C)olor out const std::string fragmentShaderText_C_C = R"( #version 400 @@ -112,4 +115,3 @@ void main() outColor = texture(tex, inTexCoord); } )"; - diff --git a/samples/utils/utils.cpp b/samples/utils/utils.cpp index 1c44823..203e0d5 100644 --- a/samples/utils/utils.cpp +++ b/samples/utils/utils.cpp @@ -14,11 +14,13 @@ // #include "utils.hpp" + #include "vulkan/vulkan.hpp" + #include #include -#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1) +#if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE #endif @@ -26,403 +28,595 @@ namespace vk { namespace su { - vk::UniqueDeviceMemory allocateMemory(vk::UniqueDevice const& device, vk::PhysicalDeviceMemoryProperties const& memoryProperties, vk::MemoryRequirements const& memoryRequirements, - vk::MemoryPropertyFlags memoryPropertyFlags) + vk::UniqueDeviceMemory allocateMemory( vk::UniqueDevice const & device, + vk::PhysicalDeviceMemoryProperties const & memoryProperties, + vk::MemoryRequirements const & memoryRequirements, + vk::MemoryPropertyFlags memoryPropertyFlags ) { - uint32_t memoryTypeIndex = findMemoryType(memoryProperties, memoryRequirements.memoryTypeBits, memoryPropertyFlags); + uint32_t memoryTypeIndex = + findMemoryType( memoryProperties, 
memoryRequirements.memoryTypeBits, memoryPropertyFlags ); - return device->allocateMemoryUnique(vk::MemoryAllocateInfo(memoryRequirements.size, memoryTypeIndex)); + return device->allocateMemoryUnique( vk::MemoryAllocateInfo( memoryRequirements.size, memoryTypeIndex ) ); } - bool contains(std::vector const& extensionProperties, std::string const& extensionName) + bool contains( std::vector const & extensionProperties, std::string const & extensionName ) { - return std::find_if(extensionProperties.begin(), extensionProperties.end(), [&extensionName](vk::ExtensionProperties const& ep) { return extensionName == ep.extensionName; }) != extensionProperties.end(); + return std::find_if( extensionProperties.begin(), + extensionProperties.end(), + [&extensionName]( vk::ExtensionProperties const & ep ) { + return extensionName == ep.extensionName; + } ) != extensionProperties.end(); } - vk::UniqueCommandPool createCommandPool(vk::UniqueDevice &device, uint32_t queueFamilyIndex) + vk::UniqueCommandPool createCommandPool( vk::UniqueDevice & device, uint32_t queueFamilyIndex ) { - vk::CommandPoolCreateInfo commandPoolCreateInfo(vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex); - return device->createCommandPoolUnique(commandPoolCreateInfo); + vk::CommandPoolCreateInfo commandPoolCreateInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, + queueFamilyIndex ); + return device->createCommandPoolUnique( commandPoolCreateInfo ); } - vk::UniqueDebugUtilsMessengerEXT createDebugUtilsMessenger(vk::UniqueInstance &instance) + vk::UniqueDebugUtilsMessengerEXT createDebugUtilsMessenger( vk::UniqueInstance & instance ) { - vk::DebugUtilsMessageSeverityFlagsEXT severityFlags(vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError); - vk::DebugUtilsMessageTypeFlagsEXT messageTypeFlags(vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation); - return instance->createDebugUtilsMessengerEXTUnique(vk::DebugUtilsMessengerCreateInfoEXT({}, severityFlags, messageTypeFlags, &vk::su::debugUtilsMessengerCallback)); + vk::DebugUtilsMessageSeverityFlagsEXT severityFlags( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | + vk::DebugUtilsMessageSeverityFlagBitsEXT::eError ); + vk::DebugUtilsMessageTypeFlagsEXT messageTypeFlags( vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | + vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance | + vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation ); + return instance->createDebugUtilsMessengerEXTUnique( vk::DebugUtilsMessengerCreateInfoEXT( + {}, severityFlags, messageTypeFlags, &vk::su::debugUtilsMessengerCallback ) ); } - vk::UniqueDescriptorPool createDescriptorPool(vk::UniqueDevice &device, std::vector const& poolSizes) + vk::UniqueDescriptorPool createDescriptorPool( vk::UniqueDevice & device, + std::vector const & poolSizes ) { - assert(!poolSizes.empty()); - uint32_t maxSets = std::accumulate(poolSizes.begin(), poolSizes.end(), 0, [](uint32_t sum, vk::DescriptorPoolSize const& dps) { return sum + dps.descriptorCount; }); - assert(0 < maxSets); + assert( !poolSizes.empty() ); + uint32_t maxSets = + std::accumulate( poolSizes.begin(), poolSizes.end(), 0, []( uint32_t sum, vk::DescriptorPoolSize const & dps ) { + return sum + dps.descriptorCount; + } ); + assert( 0 < maxSets ); - vk::DescriptorPoolCreateInfo descriptorPoolCreateInfo(vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, maxSets, 
checked_cast(poolSizes.size()), poolSizes.data()); - return device->createDescriptorPoolUnique(descriptorPoolCreateInfo); + vk::DescriptorPoolCreateInfo descriptorPoolCreateInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, + maxSets, + checked_cast( poolSizes.size() ), + poolSizes.data() ); + return device->createDescriptorPoolUnique( descriptorPoolCreateInfo ); } - vk::UniqueDescriptorSetLayout createDescriptorSetLayout(vk::UniqueDevice const& device, std::vector> const& bindingData, - vk::DescriptorSetLayoutCreateFlags flags) + vk::UniqueDescriptorSetLayout createDescriptorSetLayout( + vk::UniqueDevice const & device, + std::vector> const & bindingData, + vk::DescriptorSetLayoutCreateFlags flags ) { - std::vector bindings(bindingData.size()); - for (size_t i = 0; i < bindingData.size(); i++) + std::vector bindings( bindingData.size() ); + for ( size_t i = 0; i < bindingData.size(); i++ ) { - bindings[i] = vk::DescriptorSetLayoutBinding(checked_cast(i), std::get<0>(bindingData[i]), std::get<1>(bindingData[i]), std::get<2>(bindingData[i])); + bindings[i] = vk::DescriptorSetLayoutBinding( checked_cast( i ), + std::get<0>( bindingData[i] ), + std::get<1>( bindingData[i] ), + std::get<2>( bindingData[i] ) ); } - return device->createDescriptorSetLayoutUnique(vk::DescriptorSetLayoutCreateInfo(flags, checked_cast(bindings.size()), bindings.data())); + return device->createDescriptorSetLayoutUnique( + vk::DescriptorSetLayoutCreateInfo( flags, checked_cast( bindings.size() ), bindings.data() ) ); } - vk::UniqueDevice createDevice(vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex, std::vector const& extensions, vk::PhysicalDeviceFeatures const* physicalDeviceFeatures, - void const* pNext) + vk::UniqueDevice createDevice( vk::PhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + std::vector const & extensions, + vk::PhysicalDeviceFeatures const * physicalDeviceFeatures, + void const * pNext ) { - std::vector enabledExtensions; - enabledExtensions.reserve(extensions.size()); - for (auto const& ext : extensions) + std::vector enabledExtensions; + enabledExtensions.reserve( extensions.size() ); + for ( auto const & ext : extensions ) { - enabledExtensions.push_back(ext.data()); + enabledExtensions.push_back( ext.data() ); } // create a UniqueDevice - float queuePriority = 0.0f; - vk::DeviceQueueCreateInfo deviceQueueCreateInfo(vk::DeviceQueueCreateFlags(), queueFamilyIndex, 1, &queuePriority); - vk::DeviceCreateInfo deviceCreateInfo(vk::DeviceCreateFlags(), 1, &deviceQueueCreateInfo, 0, nullptr, checked_cast(enabledExtensions.size()), enabledExtensions.data(), physicalDeviceFeatures); + float queuePriority = 0.0f; + vk::DeviceQueueCreateInfo deviceQueueCreateInfo( + vk::DeviceQueueCreateFlags(), queueFamilyIndex, 1, &queuePriority ); + vk::DeviceCreateInfo deviceCreateInfo( vk::DeviceCreateFlags(), + 1, + &deviceQueueCreateInfo, + 0, + nullptr, + checked_cast( enabledExtensions.size() ), + enabledExtensions.data(), + physicalDeviceFeatures ); deviceCreateInfo.pNext = pNext; - return physicalDevice.createDeviceUnique(deviceCreateInfo); + return physicalDevice.createDeviceUnique( deviceCreateInfo ); } - std::vector createFramebuffers(vk::UniqueDevice &device, vk::UniqueRenderPass &renderPass, std::vector const& imageViews, vk::UniqueImageView const& depthImageView, vk::Extent2D const& extent) + std::vector createFramebuffers( vk::UniqueDevice & device, + vk::UniqueRenderPass & renderPass, + std::vector const & imageViews, + vk::UniqueImageView const & depthImageView, + 
vk::Extent2D const & extent ) { vk::ImageView attachments[2]; attachments[1] = depthImageView.get(); - vk::FramebufferCreateInfo framebufferCreateInfo(vk::FramebufferCreateFlags(), *renderPass, depthImageView ? 2 : 1, attachments, extent.width, extent.height, 1); + vk::FramebufferCreateInfo framebufferCreateInfo( vk::FramebufferCreateFlags(), + *renderPass, + depthImageView ? 2 : 1, + attachments, + extent.width, + extent.height, + 1 ); std::vector framebuffers; - framebuffers.reserve(imageViews.size()); - for (auto const& view : imageViews) + framebuffers.reserve( imageViews.size() ); + for ( auto const & view : imageViews ) { attachments[0] = view.get(); - framebuffers.push_back(device->createFramebufferUnique(framebufferCreateInfo)); + framebuffers.push_back( device->createFramebufferUnique( framebufferCreateInfo ) ); } return framebuffers; } - vk::UniquePipeline createGraphicsPipeline(vk::UniqueDevice const& device, vk::UniquePipelineCache const& pipelineCache, - std::pair const& vertexShaderData, - std::pair const& fragmentShaderData, uint32_t vertexStride, - std::vector> const& vertexInputAttributeFormatOffset, vk::FrontFace frontFace, bool depthBuffered - , vk::UniquePipelineLayout const& pipelineLayout, vk::UniqueRenderPass const& renderPass) + vk::UniquePipeline + createGraphicsPipeline( vk::UniqueDevice const & device, + vk::UniquePipelineCache const & pipelineCache, + std::pair const & vertexShaderData, + std::pair const & fragmentShaderData, + uint32_t vertexStride, + std::vector> const & vertexInputAttributeFormatOffset, + vk::FrontFace frontFace, + bool depthBuffered, + vk::UniquePipelineLayout const & pipelineLayout, + vk::UniqueRenderPass const & renderPass ) { - vk::PipelineShaderStageCreateInfo pipelineShaderStageCreateInfos[2] = - { - vk::PipelineShaderStageCreateInfo(vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eVertex, vertexShaderData.first, "main", vertexShaderData.second), - vk::PipelineShaderStageCreateInfo(vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eFragment, fragmentShaderData.first, "main", fragmentShaderData.second) + vk::PipelineShaderStageCreateInfo pipelineShaderStageCreateInfos[2] = { + vk::PipelineShaderStageCreateInfo( vk::PipelineShaderStageCreateFlags(), + vk::ShaderStageFlagBits::eVertex, + vertexShaderData.first, + "main", + vertexShaderData.second ), + vk::PipelineShaderStageCreateInfo( vk::PipelineShaderStageCreateFlags(), + vk::ShaderStageFlagBits::eFragment, + fragmentShaderData.first, + "main", + fragmentShaderData.second ) }; std::vector vertexInputAttributeDescriptions; - vk::PipelineVertexInputStateCreateInfo pipelineVertexInputStateCreateInfo; - vk::VertexInputBindingDescription vertexInputBindingDescription(0, vertexStride); - - if (0 < vertexStride) + vk::PipelineVertexInputStateCreateInfo pipelineVertexInputStateCreateInfo; + vk::VertexInputBindingDescription vertexInputBindingDescription( 0, vertexStride ); + + if ( 0 < vertexStride ) { - vertexInputAttributeDescriptions.reserve(vertexInputAttributeFormatOffset.size()); - for (uint32_t i=0 ; i(vertexInputAttributeDescriptions.size()); + pipelineVertexInputStateCreateInfo.pVertexBindingDescriptions = &vertexInputBindingDescription; + pipelineVertexInputStateCreateInfo.vertexAttributeDescriptionCount = + vk::su::checked_cast( vertexInputAttributeDescriptions.size() ); pipelineVertexInputStateCreateInfo.pVertexAttributeDescriptions = vertexInputAttributeDescriptions.data(); } - vk::PipelineInputAssemblyStateCreateInfo 
pipelineInputAssemblyStateCreateInfo(vk::PipelineInputAssemblyStateCreateFlags(), vk::PrimitiveTopology::eTriangleList); + vk::PipelineInputAssemblyStateCreateInfo pipelineInputAssemblyStateCreateInfo( + vk::PipelineInputAssemblyStateCreateFlags(), vk::PrimitiveTopology::eTriangleList ); - vk::PipelineViewportStateCreateInfo pipelineViewportStateCreateInfo(vk::PipelineViewportStateCreateFlags(), 1, nullptr, 1, nullptr); + vk::PipelineViewportStateCreateInfo pipelineViewportStateCreateInfo( + vk::PipelineViewportStateCreateFlags(), 1, nullptr, 1, nullptr ); - vk::PipelineRasterizationStateCreateInfo pipelineRasterizationStateCreateInfo(vk::PipelineRasterizationStateCreateFlags(), false, false, vk::PolygonMode::eFill, vk::CullModeFlagBits::eBack, - frontFace, false, 0.0f, 0.0f, 0.0f, 1.0f); + vk::PipelineRasterizationStateCreateInfo pipelineRasterizationStateCreateInfo( + vk::PipelineRasterizationStateCreateFlags(), + false, + false, + vk::PolygonMode::eFill, + vk::CullModeFlagBits::eBack, + frontFace, + false, + 0.0f, + 0.0f, + 0.0f, + 1.0f ); - vk::PipelineMultisampleStateCreateInfo pipelineMultisampleStateCreateInfo({}, vk::SampleCountFlagBits::e1); + vk::PipelineMultisampleStateCreateInfo pipelineMultisampleStateCreateInfo( {}, vk::SampleCountFlagBits::e1 ); - vk::StencilOpState stencilOpState(vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::CompareOp::eAlways); - vk::PipelineDepthStencilStateCreateInfo pipelineDepthStencilStateCreateInfo(vk::PipelineDepthStencilStateCreateFlags(), depthBuffered, depthBuffered, vk::CompareOp::eLessOrEqual, false, - false, stencilOpState, stencilOpState); + vk::StencilOpState stencilOpState( + vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::CompareOp::eAlways ); + vk::PipelineDepthStencilStateCreateInfo pipelineDepthStencilStateCreateInfo( + vk::PipelineDepthStencilStateCreateFlags(), + depthBuffered, + depthBuffered, + vk::CompareOp::eLessOrEqual, + false, + false, + stencilOpState, + stencilOpState ); - vk::ColorComponentFlags colorComponentFlags(vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA); - vk::PipelineColorBlendAttachmentState pipelineColorBlendAttachmentState(false, vk::BlendFactor::eZero, vk::BlendFactor::eZero, vk::BlendOp::eAdd, vk::BlendFactor::eZero, - vk::BlendFactor::eZero, vk::BlendOp::eAdd, colorComponentFlags); - vk::PipelineColorBlendStateCreateInfo pipelineColorBlendStateCreateInfo(vk::PipelineColorBlendStateCreateFlags(), false, vk::LogicOp::eNoOp, 1, &pipelineColorBlendAttachmentState, - { { 1.0f, 1.0f, 1.0f, 1.0f } }); + vk::ColorComponentFlags colorComponentFlags( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG | + vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA ); + vk::PipelineColorBlendAttachmentState pipelineColorBlendAttachmentState( false, + vk::BlendFactor::eZero, + vk::BlendFactor::eZero, + vk::BlendOp::eAdd, + vk::BlendFactor::eZero, + vk::BlendFactor::eZero, + vk::BlendOp::eAdd, + colorComponentFlags ); + vk::PipelineColorBlendStateCreateInfo pipelineColorBlendStateCreateInfo( vk::PipelineColorBlendStateCreateFlags(), + false, + vk::LogicOp::eNoOp, + 1, + &pipelineColorBlendAttachmentState, + { { 1.0f, 1.0f, 1.0f, 1.0f } } ); - vk::DynamicState dynamicStates[2] = { vk::DynamicState::eViewport, vk::DynamicState::eScissor }; - vk::PipelineDynamicStateCreateInfo pipelineDynamicStateCreateInfo(vk::PipelineDynamicStateCreateFlags(), 2, dynamicStates); + vk::DynamicState 
dynamicStates[2] = { vk::DynamicState::eViewport, vk::DynamicState::eScissor }; + vk::PipelineDynamicStateCreateInfo pipelineDynamicStateCreateInfo( + vk::PipelineDynamicStateCreateFlags(), 2, dynamicStates ); - vk::GraphicsPipelineCreateInfo graphicsPipelineCreateInfo(vk::PipelineCreateFlags(), 2, pipelineShaderStageCreateInfos, &pipelineVertexInputStateCreateInfo, - &pipelineInputAssemblyStateCreateInfo, nullptr, &pipelineViewportStateCreateInfo, &pipelineRasterizationStateCreateInfo, - &pipelineMultisampleStateCreateInfo, &pipelineDepthStencilStateCreateInfo, &pipelineColorBlendStateCreateInfo, - &pipelineDynamicStateCreateInfo, pipelineLayout.get(), renderPass.get()); + vk::GraphicsPipelineCreateInfo graphicsPipelineCreateInfo( vk::PipelineCreateFlags(), + 2, + pipelineShaderStageCreateInfos, + &pipelineVertexInputStateCreateInfo, + &pipelineInputAssemblyStateCreateInfo, + nullptr, + &pipelineViewportStateCreateInfo, + &pipelineRasterizationStateCreateInfo, + &pipelineMultisampleStateCreateInfo, + &pipelineDepthStencilStateCreateInfo, + &pipelineColorBlendStateCreateInfo, + &pipelineDynamicStateCreateInfo, + pipelineLayout.get(), + renderPass.get() ); - auto result = device->createGraphicsPipelineUnique(pipelineCache.get(), graphicsPipelineCreateInfo); - assert(result.result == vk::Result::eSuccess); - return std::move(result.value); + auto result = device->createGraphicsPipelineUnique( pipelineCache.get(), graphicsPipelineCreateInfo ); + assert( result.result == vk::Result::eSuccess ); + return std::move( result.value ); } - vk::UniqueInstance createInstance(std::string const& appName, std::string const& engineName, std::vector const& layers, std::vector const& extensions, - uint32_t apiVersion) + vk::UniqueInstance createInstance( std::string const & appName, + std::string const & engineName, + std::vector const & layers, + std::vector const & extensions, + uint32_t apiVersion ) { -#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1) - static vk::DynamicLoader dl; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = dl.getProcAddress("vkGetInstanceProcAddr"); - VULKAN_HPP_DEFAULT_DISPATCHER.init(vkGetInstanceProcAddr); +#if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + static vk::DynamicLoader dl; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = + dl.getProcAddress( "vkGetInstanceProcAddr" ); + VULKAN_HPP_DEFAULT_DISPATCHER.init( vkGetInstanceProcAddr ); #endif -#if !defined(NDEBUG) - std::vector layerProperties = vk::enumerateInstanceLayerProperties(); +#if !defined( NDEBUG ) + std::vector layerProperties = vk::enumerateInstanceLayerProperties(); std::vector extensionProperties = vk::enumerateInstanceExtensionProperties(); #endif - std::vector enabledLayers; - enabledLayers.reserve(layers.size()); - for (auto const& layer : layers) + std::vector enabledLayers; + enabledLayers.reserve( layers.size() ); + for ( auto const & layer : layers ) { - assert(std::find_if(layerProperties.begin(), layerProperties.end(), [layer](vk::LayerProperties const& lp) { return layer == lp.layerName; }) != layerProperties.end()); - enabledLayers.push_back(layer.data()); + assert( + std::find_if( layerProperties.begin(), layerProperties.end(), [layer]( vk::LayerProperties const & lp ) { + return layer == lp.layerName; + } ) != layerProperties.end() ); + enabledLayers.push_back( layer.data() ); } -#if !defined(NDEBUG) +#if !defined( NDEBUG ) // Enable standard validation layer to find as much errors as possible! 
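      // Illustrative usage sketch (assumptions: AppName and EngineName are the constant strings each sample defines,
      // and the default arguments come from the declaration in utils.hpp). The samples call this helper roughly as:
      //
      //   vk::UniqueInstance instance =
      //     vk::su::createInstance( AppName, EngineName, {}, vk::su::getInstanceExtensions() );
      //
      // Passing "VK_LAYER_KHRONOS_validation" in the layers argument is optional: in debug builds the code below
      // enables that layer automatically whenever the loader reports it as available.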
- if (std::find(layers.begin(), layers.end(), "VK_LAYER_KHRONOS_validation") == layers.end() - && std::find_if(layerProperties.begin(), layerProperties.end(), [](vk::LayerProperties const& lp) { return (strcmp("VK_LAYER_KHRONOS_validation", lp.layerName) == 0); }) != layerProperties.end()) + if ( std::find( layers.begin(), layers.end(), "VK_LAYER_KHRONOS_validation" ) == layers.end() && + std::find_if( layerProperties.begin(), layerProperties.end(), []( vk::LayerProperties const & lp ) { + return ( strcmp( "VK_LAYER_KHRONOS_validation", lp.layerName ) == 0 ); + } ) != layerProperties.end() ) { - enabledLayers.push_back("VK_LAYER_KHRONOS_validation"); + enabledLayers.push_back( "VK_LAYER_KHRONOS_validation" ); } - if (std::find(layers.begin(), layers.end(), "VK_LAYER_LUNARG_assistant_layer") == layers.end() - && std::find_if(layerProperties.begin(), layerProperties.end(), [](vk::LayerProperties const& lp) { return (strcmp("VK_LAYER_LUNARG_assistant_layer", lp.layerName) == 0); }) != layerProperties.end()) + if ( std::find( layers.begin(), layers.end(), "VK_LAYER_LUNARG_assistant_layer" ) == layers.end() && + std::find_if( layerProperties.begin(), layerProperties.end(), []( vk::LayerProperties const & lp ) { + return ( strcmp( "VK_LAYER_LUNARG_assistant_layer", lp.layerName ) == 0 ); + } ) != layerProperties.end() ) { - enabledLayers.push_back("VK_LAYER_LUNARG_assistant_layer"); + enabledLayers.push_back( "VK_LAYER_LUNARG_assistant_layer" ); } #endif - std::vector enabledExtensions; - enabledExtensions.reserve(extensions.size()); - for (auto const& ext : extensions) + std::vector enabledExtensions; + enabledExtensions.reserve( extensions.size() ); + for ( auto const & ext : extensions ) { - assert(std::find_if(extensionProperties.begin(), extensionProperties.end(), [ext](vk::ExtensionProperties const& ep) { return ext == ep.extensionName; }) != extensionProperties.end()); - enabledExtensions.push_back(ext.data()); + assert( std::find_if( + extensionProperties.begin(), extensionProperties.end(), [ext]( vk::ExtensionProperties const & ep ) { + return ext == ep.extensionName; + } ) != extensionProperties.end() ); + enabledExtensions.push_back( ext.data() ); } -#if !defined(NDEBUG) - if (std::find(extensions.begin(), extensions.end(), VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == extensions.end() - && std::find_if(extensionProperties.begin(), extensionProperties.end(), [](vk::ExtensionProperties const& ep) { return (strcmp(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, ep.extensionName) == 0); }) != extensionProperties.end()) +#if !defined( NDEBUG ) + if ( std::find( extensions.begin(), extensions.end(), VK_EXT_DEBUG_UTILS_EXTENSION_NAME ) == extensions.end() && + std::find_if( + extensionProperties.begin(), extensionProperties.end(), []( vk::ExtensionProperties const & ep ) { + return ( strcmp( VK_EXT_DEBUG_UTILS_EXTENSION_NAME, ep.extensionName ) == 0 ); + } ) != extensionProperties.end() ) { - enabledExtensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME); + enabledExtensions.push_back( VK_EXT_DEBUG_UTILS_EXTENSION_NAME ); } #endif // create a UniqueInstance - vk::ApplicationInfo applicationInfo(appName.c_str(), 1, engineName.c_str(), 1, apiVersion); -#if defined(NDEBUG) + vk::ApplicationInfo applicationInfo( appName.c_str(), 1, engineName.c_str(), 1, apiVersion ); +#if defined( NDEBUG ) // in non-debug mode just use the InstanceCreateInfo for instance creation - vk::StructureChain instanceCreateInfo({ {}, &applicationInfo, checked_cast(enabledLayers.size()), enabledLayers.data(), - 
checked_cast(enabledExtensions.size()), enabledExtensions.data() }); + vk::StructureChain instanceCreateInfo( + { {}, + &applicationInfo, + checked_cast( enabledLayers.size() ), + enabledLayers.data(), + checked_cast( enabledExtensions.size() ), + enabledExtensions.data() } ); #else // in debug mode, addionally use the debugUtilsMessengerCallback in instance creation! - vk::DebugUtilsMessageSeverityFlagsEXT severityFlags(vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError); - vk::DebugUtilsMessageTypeFlagsEXT messageTypeFlags(vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation); - vk::StructureChain instanceCreateInfo - ( - { {}, &applicationInfo, checked_cast(enabledLayers.size()), enabledLayers.data(), checked_cast(enabledExtensions.size()), enabledExtensions.data() }, - { {}, severityFlags, messageTypeFlags, &vk::su::debugUtilsMessengerCallback } - ); + vk::DebugUtilsMessageSeverityFlagsEXT severityFlags( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | + vk::DebugUtilsMessageSeverityFlagBitsEXT::eError ); + vk::DebugUtilsMessageTypeFlagsEXT messageTypeFlags( vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | + vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance | + vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation ); + vk::StructureChain instanceCreateInfo( + { {}, + &applicationInfo, + checked_cast( enabledLayers.size() ), + enabledLayers.data(), + checked_cast( enabledExtensions.size() ), + enabledExtensions.data() }, + { {}, severityFlags, messageTypeFlags, &vk::su::debugUtilsMessengerCallback } ); #endif - vk::UniqueInstance instance = vk::createInstanceUnique(instanceCreateInfo.get()); + vk::UniqueInstance instance = vk::createInstanceUnique( instanceCreateInfo.get() ); -#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1) +#if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) // initialize function pointers for instance - VULKAN_HPP_DEFAULT_DISPATCHER.init(*instance); + VULKAN_HPP_DEFAULT_DISPATCHER.init( *instance ); #endif return instance; } - vk::UniqueRenderPass createRenderPass(vk::UniqueDevice &device, vk::Format colorFormat, vk::Format depthFormat, vk::AttachmentLoadOp loadOp, vk::ImageLayout colorFinalLayout) + vk::UniqueRenderPass createRenderPass( vk::UniqueDevice & device, + vk::Format colorFormat, + vk::Format depthFormat, + vk::AttachmentLoadOp loadOp, + vk::ImageLayout colorFinalLayout ) { std::vector attachmentDescriptions; - assert(colorFormat != vk::Format::eUndefined); - attachmentDescriptions.push_back(vk::AttachmentDescription(vk::AttachmentDescriptionFlags(), colorFormat, vk::SampleCountFlagBits::e1, loadOp, vk::AttachmentStoreOp::eStore, - vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare, vk::ImageLayout::eUndefined, colorFinalLayout)); - if (depthFormat != vk::Format::eUndefined) + assert( colorFormat != vk::Format::eUndefined ); + attachmentDescriptions.push_back( vk::AttachmentDescription( vk::AttachmentDescriptionFlags(), + colorFormat, + vk::SampleCountFlagBits::e1, + loadOp, + vk::AttachmentStoreOp::eStore, + vk::AttachmentLoadOp::eDontCare, + vk::AttachmentStoreOp::eDontCare, + vk::ImageLayout::eUndefined, + colorFinalLayout ) ); + if ( depthFormat != vk::Format::eUndefined ) { - attachmentDescriptions.push_back(vk::AttachmentDescription(vk::AttachmentDescriptionFlags(), depthFormat, vk::SampleCountFlagBits::e1, loadOp, vk::AttachmentStoreOp::eDontCare, - vk::AttachmentLoadOp::eDontCare, 
vk::AttachmentStoreOp::eDontCare, vk::ImageLayout::eUndefined, - vk::ImageLayout::eDepthStencilAttachmentOptimal)); + attachmentDescriptions.push_back( + vk::AttachmentDescription( vk::AttachmentDescriptionFlags(), + depthFormat, + vk::SampleCountFlagBits::e1, + loadOp, + vk::AttachmentStoreOp::eDontCare, + vk::AttachmentLoadOp::eDontCare, + vk::AttachmentStoreOp::eDontCare, + vk::ImageLayout::eUndefined, + vk::ImageLayout::eDepthStencilAttachmentOptimal ) ); } - vk::AttachmentReference colorAttachment(0, vk::ImageLayout::eColorAttachmentOptimal); - vk::AttachmentReference depthAttachment(1, vk::ImageLayout::eDepthStencilAttachmentOptimal); - vk::SubpassDescription subpassDescription(vk::SubpassDescriptionFlags(), vk::PipelineBindPoint::eGraphics, 0, nullptr, 1, &colorAttachment, nullptr, - (depthFormat != vk::Format::eUndefined) ? &depthAttachment : nullptr); - return device->createRenderPassUnique(vk::RenderPassCreateInfo(vk::RenderPassCreateFlags(), static_cast(attachmentDescriptions.size()), attachmentDescriptions.data(), 1, - &subpassDescription)); + vk::AttachmentReference colorAttachment( 0, vk::ImageLayout::eColorAttachmentOptimal ); + vk::AttachmentReference depthAttachment( 1, vk::ImageLayout::eDepthStencilAttachmentOptimal ); + vk::SubpassDescription subpassDescription( vk::SubpassDescriptionFlags(), + vk::PipelineBindPoint::eGraphics, + 0, + nullptr, + 1, + &colorAttachment, + nullptr, + ( depthFormat != vk::Format::eUndefined ) ? &depthAttachment + : nullptr ); + return device->createRenderPassUnique( + vk::RenderPassCreateInfo( vk::RenderPassCreateFlags(), + static_cast( attachmentDescriptions.size() ), + attachmentDescriptions.data(), + 1, + &subpassDescription ) ); } - VKAPI_ATTR VkBool32 VKAPI_CALL debugUtilsMessengerCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, - VkDebugUtilsMessengerCallbackDataEXT const * pCallbackData, void * /*pUserData*/) + VKAPI_ATTR VkBool32 VKAPI_CALL + debugUtilsMessengerCallback( VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + VkDebugUtilsMessengerCallbackDataEXT const * pCallbackData, + void * /*pUserData*/ ) { - std::cerr << vk::to_string(static_cast(messageSeverity)) << ": " << vk::to_string(static_cast(messageTypes)) << ":\n"; - std::cerr << "\t" << "messageIDName = <" << pCallbackData->pMessageIdName << ">\n"; - std::cerr << "\t" << "messageIdNumber = " << pCallbackData->messageIdNumber << "\n"; - std::cerr << "\t" << "message = <" << pCallbackData->pMessage << ">\n"; - if (0 < pCallbackData->queueLabelCount) + std::cerr << vk::to_string( static_cast( messageSeverity ) ) << ": " + << vk::to_string( static_cast( messageTypes ) ) << ":\n"; + std::cerr << "\t" + << "messageIDName = <" << pCallbackData->pMessageIdName << ">\n"; + std::cerr << "\t" + << "messageIdNumber = " << pCallbackData->messageIdNumber << "\n"; + std::cerr << "\t" + << "message = <" << pCallbackData->pMessage << ">\n"; + if ( 0 < pCallbackData->queueLabelCount ) { - std::cerr << "\t" << "Queue Labels:\n"; - for (uint8_t i = 0; i < pCallbackData->queueLabelCount; i++) + std::cerr << "\t" + << "Queue Labels:\n"; + for ( uint8_t i = 0; i < pCallbackData->queueLabelCount; i++ ) { - std::cerr << "\t\t" << "labelName = <" << pCallbackData->pQueueLabels[i].pLabelName << ">\n"; + std::cerr << "\t\t" + << "labelName = <" << pCallbackData->pQueueLabels[i].pLabelName << ">\n"; } } - if (0 < pCallbackData->cmdBufLabelCount) + if ( 0 < 
pCallbackData->cmdBufLabelCount ) { - std::cerr << "\t" << "CommandBuffer Labels:\n"; - for (uint8_t i = 0; i < pCallbackData->cmdBufLabelCount; i++) + std::cerr << "\t" + << "CommandBuffer Labels:\n"; + for ( uint8_t i = 0; i < pCallbackData->cmdBufLabelCount; i++ ) { - std::cerr << "\t\t" << "labelName = <" << pCallbackData->pCmdBufLabels[i].pLabelName << ">\n"; + std::cerr << "\t\t" + << "labelName = <" << pCallbackData->pCmdBufLabels[i].pLabelName << ">\n"; } } - if (0 < pCallbackData->objectCount) + if ( 0 < pCallbackData->objectCount ) { - std::cerr << "\t" << "Objects:\n"; - for (uint8_t i = 0; i < pCallbackData->objectCount; i++) + std::cerr << "\t" + << "Objects:\n"; + for ( uint8_t i = 0; i < pCallbackData->objectCount; i++ ) { - std::cerr << "\t\t" << "Object " << i << "\n"; - std::cerr << "\t\t\t" << "objectType = " << vk::to_string(static_cast(pCallbackData->pObjects[i].objectType)) << "\n"; - std::cerr << "\t\t\t" << "objectHandle = " << pCallbackData->pObjects[i].objectHandle << "\n"; - if (pCallbackData->pObjects[i].pObjectName) + std::cerr << "\t\t" + << "Object " << i << "\n"; + std::cerr << "\t\t\t" + << "objectType = " + << vk::to_string( static_cast( pCallbackData->pObjects[i].objectType ) ) << "\n"; + std::cerr << "\t\t\t" + << "objectHandle = " << pCallbackData->pObjects[i].objectHandle << "\n"; + if ( pCallbackData->pObjects[i].pObjectName ) { - std::cerr << "\t\t\t" << "objectName = <" << pCallbackData->pObjects[i].pObjectName << ">\n"; + std::cerr << "\t\t\t" + << "objectName = <" << pCallbackData->pObjects[i].pObjectName << ">\n"; } } } return VK_TRUE; } - uint32_t findGraphicsQueueFamilyIndex(std::vector const& queueFamilyProperties) + uint32_t findGraphicsQueueFamilyIndex( std::vector const & queueFamilyProperties ) { // get the first index into queueFamiliyProperties which supports graphics - size_t graphicsQueueFamilyIndex = std::distance(queueFamilyProperties.begin(), std::find_if(queueFamilyProperties.begin(), queueFamilyProperties.end(), - [](vk::QueueFamilyProperties const& qfp) { return qfp.queueFlags & vk::QueueFlagBits::eGraphics; })); - assert(graphicsQueueFamilyIndex < queueFamilyProperties.size()); + size_t graphicsQueueFamilyIndex = std::distance( + queueFamilyProperties.begin(), + std::find_if( + queueFamilyProperties.begin(), queueFamilyProperties.end(), []( vk::QueueFamilyProperties const & qfp ) { + return qfp.queueFlags & vk::QueueFlagBits::eGraphics; + } ) ); + assert( graphicsQueueFamilyIndex < queueFamilyProperties.size() ); - return checked_cast(graphicsQueueFamilyIndex); + return checked_cast( graphicsQueueFamilyIndex ); } - std::pair findGraphicsAndPresentQueueFamilyIndex(vk::PhysicalDevice physicalDevice, vk::SurfaceKHR const& surface) + std::pair findGraphicsAndPresentQueueFamilyIndex( vk::PhysicalDevice physicalDevice, + vk::SurfaceKHR const & surface ) { std::vector queueFamilyProperties = physicalDevice.getQueueFamilyProperties(); - assert(queueFamilyProperties.size() < std::numeric_limits::max()); + assert( queueFamilyProperties.size() < std::numeric_limits::max() ); - uint32_t graphicsQueueFamilyIndex = findGraphicsQueueFamilyIndex(queueFamilyProperties); - if (physicalDevice.getSurfaceSupportKHR(graphicsQueueFamilyIndex, surface)) + uint32_t graphicsQueueFamilyIndex = findGraphicsQueueFamilyIndex( queueFamilyProperties ); + if ( physicalDevice.getSurfaceSupportKHR( graphicsQueueFamilyIndex, surface ) ) { - return std::make_pair(graphicsQueueFamilyIndex, graphicsQueueFamilyIndex); // the first graphicsQueueFamilyIndex does also 
support presents + return std::make_pair( + graphicsQueueFamilyIndex, + graphicsQueueFamilyIndex ); // the first graphicsQueueFamilyIndex does also support presents } - // the graphicsQueueFamilyIndex doesn't support present -> look for an other family index that supports both graphics and present - for (size_t i = 0; i < queueFamilyProperties.size(); i++) + // the graphicsQueueFamilyIndex doesn't support present -> look for an other family index that supports both + // graphics and present + for ( size_t i = 0; i < queueFamilyProperties.size(); i++ ) { - if ((queueFamilyProperties[i].queueFlags & vk::QueueFlagBits::eGraphics) && physicalDevice.getSurfaceSupportKHR(static_cast(i), surface)) + if ( ( queueFamilyProperties[i].queueFlags & vk::QueueFlagBits::eGraphics ) && + physicalDevice.getSurfaceSupportKHR( static_cast( i ), surface ) ) { - return std::make_pair(static_cast(i), static_cast(i)); + return std::make_pair( static_cast( i ), static_cast( i ) ); } } - // there's nothing like a single family index that supports both graphics and present -> look for an other family index that supports present - for (size_t i = 0; i < queueFamilyProperties.size(); i++) + // there's nothing like a single family index that supports both graphics and present -> look for an other family + // index that supports present + for ( size_t i = 0; i < queueFamilyProperties.size(); i++ ) { - if (physicalDevice.getSurfaceSupportKHR(static_cast(i), surface)) + if ( physicalDevice.getSurfaceSupportKHR( static_cast( i ), surface ) ) { - return std::make_pair(graphicsQueueFamilyIndex, static_cast(i)); + return std::make_pair( graphicsQueueFamilyIndex, static_cast( i ) ); } } - throw std::runtime_error("Could not find queues for both graphics or present -> terminating"); + throw std::runtime_error( "Could not find queues for both graphics or present -> terminating" ); } - uint32_t findMemoryType(vk::PhysicalDeviceMemoryProperties const& memoryProperties, uint32_t typeBits, vk::MemoryPropertyFlags requirementsMask) + uint32_t findMemoryType( vk::PhysicalDeviceMemoryProperties const & memoryProperties, + uint32_t typeBits, + vk::MemoryPropertyFlags requirementsMask ) { - uint32_t typeIndex = uint32_t(~0); - for (uint32_t i = 0; i < memoryProperties.memoryTypeCount; i++) + uint32_t typeIndex = uint32_t( ~0 ); + for ( uint32_t i = 0; i < memoryProperties.memoryTypeCount; i++ ) { - if ((typeBits & 1) && ((memoryProperties.memoryTypes[i].propertyFlags & requirementsMask) == requirementsMask)) + if ( ( typeBits & 1 ) && + ( ( memoryProperties.memoryTypes[i].propertyFlags & requirementsMask ) == requirementsMask ) ) { typeIndex = i; break; } typeBits >>= 1; } - assert(typeIndex != ~0); + assert( typeIndex != ~0 ); return typeIndex; } std::vector getDeviceExtensions() { - return{ VK_KHR_SWAPCHAIN_EXTENSION_NAME }; + return { VK_KHR_SWAPCHAIN_EXTENSION_NAME }; } std::vector getInstanceExtensions() { std::vector extensions; - extensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME); -#if defined(VK_USE_PLATFORM_ANDROID_KHR) - extensions.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME); -#elif defined(VK_USE_PLATFORM_IOS_MVK) - extensions.push_back(VK_MVK_IOS_SURFACE_EXTENSION_NAME); -#elif defined(VK_USE_PLATFORM_MACOS_MVK) - extensions.push_back(VK_MVK_MACOS_SURFACE_EXTENSION_NAME); -#elif defined(VK_USE_PLATFORM_MIR_KHR) - extensions.push_back(VK_KHR_MIR_SURFACE_EXTENSION_NAME); -#elif defined(VK_USE_PLATFORM_VI_NN) - extensions.push_back(VK_NN_VI_SURFACE_EXTENSION_NAME); -#elif defined(VK_USE_PLATFORM_WAYLAND_KHR) - 
extensions.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME); -#elif defined(VK_USE_PLATFORM_WIN32_KHR) - extensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME); -#elif defined(VK_USE_PLATFORM_XCB_KHR) - extensions.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME); -#elif defined(VK_USE_PLATFORM_XLIB_KHR) - extensions.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME); -#elif defined(VK_USE_PLATFORM_XLIB_XRANDR_EXT) - extensions.push_back(VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME); + extensions.push_back( VK_KHR_SURFACE_EXTENSION_NAME ); +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + extensions.push_back( VK_KHR_ANDROID_SURFACE_EXTENSION_NAME ); +#elif defined( VK_USE_PLATFORM_IOS_MVK ) + extensions.push_back( VK_MVK_IOS_SURFACE_EXTENSION_NAME ); +#elif defined( VK_USE_PLATFORM_MACOS_MVK ) + extensions.push_back( VK_MVK_MACOS_SURFACE_EXTENSION_NAME ); +#elif defined( VK_USE_PLATFORM_MIR_KHR ) + extensions.push_back( VK_KHR_MIR_SURFACE_EXTENSION_NAME ); +#elif defined( VK_USE_PLATFORM_VI_NN ) + extensions.push_back( VK_NN_VI_SURFACE_EXTENSION_NAME ); +#elif defined( VK_USE_PLATFORM_WAYLAND_KHR ) + extensions.push_back( VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME ); +#elif defined( VK_USE_PLATFORM_WIN32_KHR ) + extensions.push_back( VK_KHR_WIN32_SURFACE_EXTENSION_NAME ); +#elif defined( VK_USE_PLATFORM_XCB_KHR ) + extensions.push_back( VK_KHR_XCB_SURFACE_EXTENSION_NAME ); +#elif defined( VK_USE_PLATFORM_XLIB_KHR ) + extensions.push_back( VK_KHR_XLIB_SURFACE_EXTENSION_NAME ); +#elif defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + extensions.push_back( VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME ); #endif return extensions; } - vk::Format pickDepthFormat(vk::PhysicalDevice const& physicalDevice) + vk::Format pickDepthFormat( vk::PhysicalDevice const & physicalDevice ) { - std::vector candidates = {vk::Format::eD32Sfloat, vk::Format::eD32SfloatS8Uint, vk::Format::eD24UnormS8Uint}; - for (vk::Format format : candidates) + std::vector candidates = { vk::Format::eD32Sfloat, + vk::Format::eD32SfloatS8Uint, + vk::Format::eD24UnormS8Uint }; + for ( vk::Format format : candidates ) { - vk::FormatProperties props = physicalDevice.getFormatProperties(format); + vk::FormatProperties props = physicalDevice.getFormatProperties( format ); - if (props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eDepthStencilAttachment) + if ( props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eDepthStencilAttachment ) { return format; } } - throw std::runtime_error("failed to find supported format!"); + throw std::runtime_error( "failed to find supported format!" 
); } - vk::PresentModeKHR pickPresentMode(std::vector const& presentModes) + vk::PresentModeKHR pickPresentMode( std::vector const & presentModes ) { vk::PresentModeKHR pickedMode = vk::PresentModeKHR::eFifo; - for(const auto& presentMode : presentModes) + for ( const auto & presentMode : presentModes ) { - if(presentMode == vk::PresentModeKHR::eMailbox) + if ( presentMode == vk::PresentModeKHR::eMailbox ) { pickedMode = presentMode; break; } - if(presentMode == vk::PresentModeKHR::eImmediate) + if ( presentMode == vk::PresentModeKHR::eImmediate ) { pickedMode = presentMode; } @@ -430,13 +624,13 @@ namespace vk return pickedMode; } - vk::SurfaceFormatKHR pickSurfaceFormat(std::vector const& formats) + vk::SurfaceFormatKHR pickSurfaceFormat( std::vector const & formats ) { - assert(!formats.empty()); + assert( !formats.empty() ); vk::SurfaceFormatKHR pickedFormat = formats[0]; - if (formats.size() == 1) + if ( formats.size() == 1 ) { - if (formats[0].format == vk::Format::eUndefined) + if ( formats[0].format == vk::Format::eUndefined ) { pickedFormat.format = vk::Format::eB8G8R8A8Unorm; pickedFormat.colorSpace = vk::ColorSpaceKHR::eSrgbNonlinear; @@ -445,88 +639,74 @@ namespace vk else { // request several formats, the first found will be used - vk::Format requestedFormats[] = { vk::Format::eB8G8R8A8Unorm, vk::Format::eR8G8B8A8Unorm, vk::Format::eB8G8R8Unorm, vk::Format::eR8G8B8Unorm }; + vk::Format requestedFormats[] = { + vk::Format::eB8G8R8A8Unorm, vk::Format::eR8G8B8A8Unorm, vk::Format::eB8G8R8Unorm, vk::Format::eR8G8B8Unorm + }; vk::ColorSpaceKHR requestedColorSpace = vk::ColorSpaceKHR::eSrgbNonlinear; - for (size_t i = 0; i < sizeof(requestedFormats) / sizeof(requestedFormats[0]); i++) + for ( size_t i = 0; i < sizeof( requestedFormats ) / sizeof( requestedFormats[0] ); i++ ) { vk::Format requestedFormat = requestedFormats[i]; - auto it = std::find_if(formats.begin(), formats.end(), [requestedFormat, requestedColorSpace](vk::SurfaceFormatKHR const& f) { return (f.format == requestedFormat) && (f.colorSpace == requestedColorSpace); }); - if (it != formats.end()) + auto it = std::find_if( + formats.begin(), formats.end(), [requestedFormat, requestedColorSpace]( vk::SurfaceFormatKHR const & f ) { + return ( f.format == requestedFormat ) && ( f.colorSpace == requestedColorSpace ); + } ); + if ( it != formats.end() ) { pickedFormat = *it; break; } } } - assert(pickedFormat.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear); + assert( pickedFormat.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ); return pickedFormat; } - void setImageLayout(vk::UniqueCommandBuffer const& commandBuffer, vk::Image image, vk::Format format, vk::ImageLayout oldImageLayout, vk::ImageLayout newImageLayout) + void setImageLayout( vk::UniqueCommandBuffer const & commandBuffer, + vk::Image image, + vk::Format format, + vk::ImageLayout oldImageLayout, + vk::ImageLayout newImageLayout ) { vk::AccessFlags sourceAccessMask; - switch (oldImageLayout) + switch ( oldImageLayout ) { - case vk::ImageLayout::eTransferDstOptimal: - sourceAccessMask = vk::AccessFlagBits::eTransferWrite; - break; - case vk::ImageLayout::ePreinitialized: - sourceAccessMask = vk::AccessFlagBits::eHostWrite; - break; - case vk::ImageLayout::eGeneral: // sourceAccessMask is empty - case vk::ImageLayout::eUndefined: - break; - default: - assert(false); - break; + case vk::ImageLayout::eTransferDstOptimal: sourceAccessMask = vk::AccessFlagBits::eTransferWrite; break; + case vk::ImageLayout::ePreinitialized: sourceAccessMask = 
vk::AccessFlagBits::eHostWrite; break; + case vk::ImageLayout::eGeneral: // sourceAccessMask is empty + case vk::ImageLayout::eUndefined: break; + default: assert( false ); break; } vk::PipelineStageFlags sourceStage; - switch (oldImageLayout) + switch ( oldImageLayout ) { case vk::ImageLayout::eGeneral: - case vk::ImageLayout::ePreinitialized: - sourceStage = vk::PipelineStageFlagBits::eHost; - break; - case vk::ImageLayout::eTransferDstOptimal: - sourceStage = vk::PipelineStageFlagBits::eTransfer; - break; - case vk::ImageLayout::eUndefined: - sourceStage = vk::PipelineStageFlagBits::eTopOfPipe; - break; - default: - assert(false); - break; + case vk::ImageLayout::ePreinitialized: sourceStage = vk::PipelineStageFlagBits::eHost; break; + case vk::ImageLayout::eTransferDstOptimal: sourceStage = vk::PipelineStageFlagBits::eTransfer; break; + case vk::ImageLayout::eUndefined: sourceStage = vk::PipelineStageFlagBits::eTopOfPipe; break; + default: assert( false ); break; } vk::AccessFlags destinationAccessMask; - switch (newImageLayout) + switch ( newImageLayout ) { case vk::ImageLayout::eColorAttachmentOptimal: destinationAccessMask = vk::AccessFlagBits::eColorAttachmentWrite; break; case vk::ImageLayout::eDepthStencilAttachmentOptimal: - destinationAccessMask = vk::AccessFlagBits::eDepthStencilAttachmentRead | vk::AccessFlagBits::eDepthStencilAttachmentWrite; - break; - case vk::ImageLayout::eGeneral: // empty destinationAccessMask - case vk::ImageLayout::ePresentSrcKHR: - break; - case vk::ImageLayout::eShaderReadOnlyOptimal: - destinationAccessMask = vk::AccessFlagBits::eShaderRead; - break; - case vk::ImageLayout::eTransferSrcOptimal: - destinationAccessMask = vk::AccessFlagBits::eTransferRead; - break; - case vk::ImageLayout::eTransferDstOptimal: - destinationAccessMask = vk::AccessFlagBits::eTransferWrite; - break; - default: - assert(false); + destinationAccessMask = + vk::AccessFlagBits::eDepthStencilAttachmentRead | vk::AccessFlagBits::eDepthStencilAttachmentWrite; break; + case vk::ImageLayout::eGeneral: // empty destinationAccessMask + case vk::ImageLayout::ePresentSrcKHR: break; + case vk::ImageLayout::eShaderReadOnlyOptimal: destinationAccessMask = vk::AccessFlagBits::eShaderRead; break; + case vk::ImageLayout::eTransferSrcOptimal: destinationAccessMask = vk::AccessFlagBits::eTransferRead; break; + case vk::ImageLayout::eTransferDstOptimal: destinationAccessMask = vk::AccessFlagBits::eTransferWrite; break; + default: assert( false ); break; } vk::PipelineStageFlags destinationStage; - switch (newImageLayout) + switch ( newImageLayout ) { case vk::ImageLayout::eColorAttachmentOptimal: destinationStage = vk::PipelineStageFlagBits::eColorAttachmentOutput; @@ -534,29 +714,21 @@ namespace vk case vk::ImageLayout::eDepthStencilAttachmentOptimal: destinationStage = vk::PipelineStageFlagBits::eEarlyFragmentTests; break; - case vk::ImageLayout::eGeneral: - destinationStage = vk::PipelineStageFlagBits::eHost; - break; - case vk::ImageLayout::ePresentSrcKHR: - destinationStage = vk::PipelineStageFlagBits::eBottomOfPipe; - break; + case vk::ImageLayout::eGeneral: destinationStage = vk::PipelineStageFlagBits::eHost; break; + case vk::ImageLayout::ePresentSrcKHR: destinationStage = vk::PipelineStageFlagBits::eBottomOfPipe; break; case vk::ImageLayout::eShaderReadOnlyOptimal: destinationStage = vk::PipelineStageFlagBits::eFragmentShader; break; case vk::ImageLayout::eTransferDstOptimal: - case vk::ImageLayout::eTransferSrcOptimal: - destinationStage = vk::PipelineStageFlagBits::eTransfer; - 
break; - default: - assert(false); - break; + case vk::ImageLayout::eTransferSrcOptimal: destinationStage = vk::PipelineStageFlagBits::eTransfer; break; + default: assert( false ); break; } vk::ImageAspectFlags aspectMask; - if (newImageLayout == vk::ImageLayout::eDepthStencilAttachmentOptimal) + if ( newImageLayout == vk::ImageLayout::eDepthStencilAttachmentOptimal ) { aspectMask = vk::ImageAspectFlagBits::eDepth; - if (format == vk::Format::eD32SfloatS8Uint || format == vk::Format::eD24UnormS8Uint) + if ( format == vk::Format::eD32SfloatS8Uint || format == vk::Format::eD24UnormS8Uint ) { aspectMask |= vk::ImageAspectFlagBits::eStencil; } @@ -566,204 +738,314 @@ namespace vk aspectMask = vk::ImageAspectFlagBits::eColor; } - vk::ImageSubresourceRange imageSubresourceRange(aspectMask, 0, 1, 0, 1); - vk::ImageMemoryBarrier imageMemoryBarrier(sourceAccessMask, destinationAccessMask, oldImageLayout, newImageLayout, VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, image, imageSubresourceRange); - return commandBuffer->pipelineBarrier(sourceStage, destinationStage, {}, nullptr, nullptr, imageMemoryBarrier); + vk::ImageSubresourceRange imageSubresourceRange( aspectMask, 0, 1, 0, 1 ); + vk::ImageMemoryBarrier imageMemoryBarrier( sourceAccessMask, + destinationAccessMask, + oldImageLayout, + newImageLayout, + VK_QUEUE_FAMILY_IGNORED, + VK_QUEUE_FAMILY_IGNORED, + image, + imageSubresourceRange ); + return commandBuffer->pipelineBarrier( sourceStage, destinationStage, {}, nullptr, nullptr, imageMemoryBarrier ); } - void submitAndWait(vk::UniqueDevice &device, vk::Queue queue, vk::UniqueCommandBuffer &commandBuffer) + void submitAndWait( vk::UniqueDevice & device, vk::Queue queue, vk::UniqueCommandBuffer & commandBuffer ) { - vk::UniqueFence fence = device->createFenceUnique(vk::FenceCreateInfo()); + vk::UniqueFence fence = device->createFenceUnique( vk::FenceCreateInfo() ); vk::PipelineStageFlags pipelineStageFlags = vk::PipelineStageFlagBits::eColorAttachmentOutput; - queue.submit(vk::SubmitInfo(0, nullptr, &pipelineStageFlags, 1, &commandBuffer.get()), fence.get()); - while (vk::Result::eTimeout == device->waitForFences(fence.get(), VK_TRUE, vk::su::FenceTimeout)) + queue.submit( vk::SubmitInfo( 0, nullptr, &pipelineStageFlags, 1, &commandBuffer.get() ), fence.get() ); + while ( vk::Result::eTimeout == device->waitForFences( fence.get(), VK_TRUE, vk::su::FenceTimeout ) ) ; } - void updateDescriptorSets(vk::UniqueDevice const& device, vk::UniqueDescriptorSet const& descriptorSet, - std::vector> const& bufferData, vk::su::TextureData const& textureData, - uint32_t bindingOffset) + void updateDescriptorSets( + vk::UniqueDevice const & device, + vk::UniqueDescriptorSet const & descriptorSet, + std::vector> const & + bufferData, + vk::su::TextureData const & textureData, + uint32_t bindingOffset ) { std::vector bufferInfos; - bufferInfos.reserve(bufferData.size()); + bufferInfos.reserve( bufferData.size() ); std::vector writeDescriptorSets; - writeDescriptorSets.reserve(bufferData.size() + 1); + writeDescriptorSets.reserve( bufferData.size() + 1 ); uint32_t dstBinding = bindingOffset; - for (auto const& bd : bufferData) + for ( auto const & bd : bufferData ) { - bufferInfos.push_back(vk::DescriptorBufferInfo(*std::get<1>(bd), 0, VK_WHOLE_SIZE)); - writeDescriptorSets.push_back(vk::WriteDescriptorSet(*descriptorSet, dstBinding++, 0, 1, std::get<0>(bd), nullptr, &bufferInfos.back(), std::get<2>(bd) ? 
&*std::get<2>(bd) : nullptr)); + bufferInfos.push_back( vk::DescriptorBufferInfo( *std::get<1>( bd ), 0, VK_WHOLE_SIZE ) ); + writeDescriptorSets.push_back( vk::WriteDescriptorSet( *descriptorSet, + dstBinding++, + 0, + 1, + std::get<0>( bd ), + nullptr, + &bufferInfos.back(), + std::get<2>( bd ) ? &*std::get<2>( bd ) : nullptr ) ); } - vk::DescriptorImageInfo imageInfo(*textureData.textureSampler, *textureData.imageData->imageView, vk::ImageLayout::eShaderReadOnlyOptimal); - writeDescriptorSets.push_back(vk::WriteDescriptorSet(*descriptorSet, dstBinding, 0, 1, vk::DescriptorType::eCombinedImageSampler, &imageInfo, nullptr, nullptr)); + vk::DescriptorImageInfo imageInfo( + *textureData.textureSampler, *textureData.imageData->imageView, vk::ImageLayout::eShaderReadOnlyOptimal ); + writeDescriptorSets.push_back( vk::WriteDescriptorSet( + *descriptorSet, dstBinding, 0, 1, vk::DescriptorType::eCombinedImageSampler, &imageInfo, nullptr, nullptr ) ); - device->updateDescriptorSets(writeDescriptorSets, nullptr); + device->updateDescriptorSets( writeDescriptorSets, nullptr ); } - void updateDescriptorSets(vk::UniqueDevice const& device, vk::UniqueDescriptorSet const& descriptorSet, - std::vector> const& bufferData, - std::vector const& textureData, uint32_t bindingOffset) + void updateDescriptorSets( + vk::UniqueDevice const & device, + vk::UniqueDescriptorSet const & descriptorSet, + std::vector> const & + bufferData, + std::vector const & textureData, + uint32_t bindingOffset ) { std::vector bufferInfos; - bufferInfos.reserve(bufferData.size()); + bufferInfos.reserve( bufferData.size() ); std::vector writeDescriptorSets; - writeDescriptorSets.reserve(bufferData.size() + (textureData.empty() ? 0 : 1)); + writeDescriptorSets.reserve( bufferData.size() + ( textureData.empty() ? 0 : 1 ) ); uint32_t dstBinding = bindingOffset; - for (auto const& bd : bufferData) + for ( auto const & bd : bufferData ) { - bufferInfos.push_back(vk::DescriptorBufferInfo(*std::get<1>(bd), 0, VK_WHOLE_SIZE)); - writeDescriptorSets.push_back(vk::WriteDescriptorSet(*descriptorSet, dstBinding++, 0, 1, std::get<0>(bd), nullptr, &bufferInfos.back(), std::get<2>(bd) ? &*std::get<2>(bd) : nullptr)); + bufferInfos.push_back( vk::DescriptorBufferInfo( *std::get<1>( bd ), 0, VK_WHOLE_SIZE ) ); + writeDescriptorSets.push_back( vk::WriteDescriptorSet( *descriptorSet, + dstBinding++, + 0, + 1, + std::get<0>( bd ), + nullptr, + &bufferInfos.back(), + std::get<2>( bd ) ? 
&*std::get<2>( bd ) : nullptr ) ); } std::vector imageInfos; - if (!textureData.empty()) + if ( !textureData.empty() ) { - imageInfos.reserve(textureData.size()); - for (auto const& td : textureData) + imageInfos.reserve( textureData.size() ); + for ( auto const & td : textureData ) { - imageInfos.push_back(vk::DescriptorImageInfo(*td.textureSampler, *td.imageData->imageView, vk::ImageLayout::eShaderReadOnlyOptimal)); + imageInfos.push_back( vk::DescriptorImageInfo( + *td.textureSampler, *td.imageData->imageView, vk::ImageLayout::eShaderReadOnlyOptimal ) ); } - writeDescriptorSets.push_back(vk::WriteDescriptorSet(*descriptorSet, dstBinding, 0, checked_cast(imageInfos.size()), vk::DescriptorType::eCombinedImageSampler, imageInfos.data(), - nullptr, nullptr)); + writeDescriptorSets.push_back( vk::WriteDescriptorSet( *descriptorSet, + dstBinding, + 0, + checked_cast( imageInfos.size() ), + vk::DescriptorType::eCombinedImageSampler, + imageInfos.data(), + nullptr, + nullptr ) ); } - device->updateDescriptorSets(writeDescriptorSets, nullptr); + device->updateDescriptorSets( writeDescriptorSets, nullptr ); } - BufferData::BufferData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::DeviceSize size, vk::BufferUsageFlags usage, vk::MemoryPropertyFlags propertyFlags) -#if !defined(NDEBUG) - : m_size(size) - , m_usage(usage) - , m_propertyFlags(propertyFlags) + BufferData::BufferData( vk::PhysicalDevice const & physicalDevice, + vk::UniqueDevice const & device, + vk::DeviceSize size, + vk::BufferUsageFlags usage, + vk::MemoryPropertyFlags propertyFlags ) +#if !defined( NDEBUG ) + : m_size( size ), m_usage( usage ), m_propertyFlags( propertyFlags ) #endif { - buffer = device->createBufferUnique(vk::BufferCreateInfo(vk::BufferCreateFlags(), size, usage)); - deviceMemory = vk::su::allocateMemory(device, physicalDevice.getMemoryProperties(), device->getBufferMemoryRequirements(buffer.get()), propertyFlags); - device->bindBufferMemory(buffer.get(), deviceMemory.get(), 0); + buffer = device->createBufferUnique( vk::BufferCreateInfo( vk::BufferCreateFlags(), size, usage ) ); + deviceMemory = vk::su::allocateMemory( device, + physicalDevice.getMemoryProperties(), + device->getBufferMemoryRequirements( buffer.get() ), + propertyFlags ); + device->bindBufferMemory( buffer.get(), deviceMemory.get(), 0 ); } - DepthBufferData::DepthBufferData(vk::PhysicalDevice &physicalDevice, vk::UniqueDevice & device, vk::Format format, vk::Extent2D const& extent) - : ImageData(physicalDevice, device, format, extent, vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eDepthStencilAttachment, vk::ImageLayout::eUndefined, - vk::MemoryPropertyFlagBits::eDeviceLocal, vk::ImageAspectFlagBits::eDepth) + DepthBufferData::DepthBufferData( vk::PhysicalDevice & physicalDevice, + vk::UniqueDevice & device, + vk::Format format, + vk::Extent2D const & extent ) + : ImageData( physicalDevice, + device, + format, + extent, + vk::ImageTiling::eOptimal, + vk::ImageUsageFlagBits::eDepthStencilAttachment, + vk::ImageLayout::eUndefined, + vk::MemoryPropertyFlagBits::eDeviceLocal, + vk::ImageAspectFlagBits::eDepth ) + {} + + ImageData::ImageData( vk::PhysicalDevice const & physicalDevice, + vk::UniqueDevice const & device, + vk::Format format_, + vk::Extent2D const & extent, + vk::ImageTiling tiling, + vk::ImageUsageFlags usage, + vk::ImageLayout initialLayout, + vk::MemoryPropertyFlags memoryProperties, + vk::ImageAspectFlags aspectMask ) + : format( format_ ) { + vk::ImageCreateInfo imageCreateInfo( 
vk::ImageCreateFlags(), + vk::ImageType::e2D, + format, + vk::Extent3D( extent, 1 ), + 1, + 1, + vk::SampleCountFlagBits::e1, + tiling, + usage | vk::ImageUsageFlagBits::eSampled, + vk::SharingMode::eExclusive, + 0, + nullptr, + initialLayout ); + image = device->createImageUnique( imageCreateInfo ); + + deviceMemory = vk::su::allocateMemory( device, + physicalDevice.getMemoryProperties(), + device->getImageMemoryRequirements( image.get() ), + memoryProperties ); + + device->bindImageMemory( image.get(), deviceMemory.get(), 0 ); + + vk::ComponentMapping componentMapping( + ComponentSwizzle::eR, ComponentSwizzle::eG, ComponentSwizzle::eB, ComponentSwizzle::eA ); + vk::ImageViewCreateInfo imageViewCreateInfo( vk::ImageViewCreateFlags(), + image.get(), + vk::ImageViewType::e2D, + format, + componentMapping, + vk::ImageSubresourceRange( aspectMask, 0, 1, 0, 1 ) ); + imageView = device->createImageViewUnique( imageViewCreateInfo ); } - ImageData::ImageData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::Format format_, vk::Extent2D const& extent, vk::ImageTiling tiling, - vk::ImageUsageFlags usage, vk::ImageLayout initialLayout, vk::MemoryPropertyFlags memoryProperties, vk::ImageAspectFlags aspectMask) - : format(format_) - { - vk::ImageCreateInfo imageCreateInfo(vk::ImageCreateFlags(), vk::ImageType::e2D, format, vk::Extent3D(extent, 1), 1, 1, - vk::SampleCountFlagBits::e1, tiling, usage | vk::ImageUsageFlagBits::eSampled, vk::SharingMode::eExclusive, 0, nullptr, initialLayout); - image = device->createImageUnique(imageCreateInfo); - - deviceMemory = vk::su::allocateMemory(device, physicalDevice.getMemoryProperties(), device->getImageMemoryRequirements(image.get()), memoryProperties); - - device->bindImageMemory(image.get(), deviceMemory.get(), 0); - - vk::ComponentMapping componentMapping(ComponentSwizzle::eR, ComponentSwizzle::eG, ComponentSwizzle::eB, ComponentSwizzle::eA); - vk::ImageViewCreateInfo imageViewCreateInfo(vk::ImageViewCreateFlags(), image.get(), vk::ImageViewType::e2D, format, componentMapping, vk::ImageSubresourceRange(aspectMask, 0, 1, 0, 1)); - imageView = device->createImageViewUnique(imageViewCreateInfo); - } - - SurfaceData::SurfaceData(vk::UniqueInstance &instance, std::string const &windowName, vk::Extent2D const &extent_) - : extent(extent_) - , window(vk::su::createWindow(windowName, extent)) + SurfaceData::SurfaceData( vk::UniqueInstance & instance, + std::string const & windowName, + vk::Extent2D const & extent_ ) + : extent( extent_ ), window( vk::su::createWindow( windowName, extent ) ) { VkSurfaceKHR _surface; - VkResult err = glfwCreateWindowSurface(VkInstance(instance.get()), window.handle, nullptr, &_surface); - if (err != VK_SUCCESS) - throw std::runtime_error("Failed to create window!"); - vk::ObjectDestroy _deleter(instance.get()); - surface = vk::UniqueSurfaceKHR(vk::SurfaceKHR(_surface), _deleter); + VkResult err = glfwCreateWindowSurface( VkInstance( instance.get() ), window.handle, nullptr, &_surface ); + if ( err != VK_SUCCESS ) + throw std::runtime_error( "Failed to create window!" 
); + vk::ObjectDestroy _deleter( instance.get() ); + surface = vk::UniqueSurfaceKHR( vk::SurfaceKHR( _surface ), _deleter ); } - SwapChainData::SwapChainData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::SurfaceKHR const& surface, vk::Extent2D const& extent, vk::ImageUsageFlags usage, - vk::UniqueSwapchainKHR const& oldSwapChain, uint32_t graphicsQueueFamilyIndex, uint32_t presentQueueFamilyIndex) + SwapChainData::SwapChainData( vk::PhysicalDevice const & physicalDevice, + vk::UniqueDevice const & device, + vk::SurfaceKHR const & surface, + vk::Extent2D const & extent, + vk::ImageUsageFlags usage, + vk::UniqueSwapchainKHR const & oldSwapChain, + uint32_t graphicsQueueFamilyIndex, + uint32_t presentQueueFamilyIndex ) { - vk::SurfaceFormatKHR surfaceFormat = vk::su::pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surface)); - colorFormat = surfaceFormat.format; + vk::SurfaceFormatKHR surfaceFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surface ) ); + colorFormat = surfaceFormat.format; - vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevice.getSurfaceCapabilitiesKHR(surface); - VkExtent2D swapchainExtent; - if (surfaceCapabilities.currentExtent.width == std::numeric_limits::max()) + vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevice.getSurfaceCapabilitiesKHR( surface ); + VkExtent2D swapchainExtent; + if ( surfaceCapabilities.currentExtent.width == std::numeric_limits::max() ) { // If the surface size is undefined, the size is set to the size of the images requested. - swapchainExtent.width = clamp(extent.width, surfaceCapabilities.minImageExtent.width, surfaceCapabilities.maxImageExtent.width); - swapchainExtent.height = clamp(extent.height, surfaceCapabilities.minImageExtent.height, surfaceCapabilities.maxImageExtent.height); + swapchainExtent.width = + clamp( extent.width, surfaceCapabilities.minImageExtent.width, surfaceCapabilities.maxImageExtent.width ); + swapchainExtent.height = + clamp( extent.height, surfaceCapabilities.minImageExtent.height, surfaceCapabilities.maxImageExtent.height ); } else { // If the surface size is defined, the swap chain size must match swapchainExtent = surfaceCapabilities.currentExtent; } - vk::SurfaceTransformFlagBitsKHR preTransform = (surfaceCapabilities.supportedTransforms & vk::SurfaceTransformFlagBitsKHR::eIdentity) ? vk::SurfaceTransformFlagBitsKHR::eIdentity : surfaceCapabilities.currentTransform; + vk::SurfaceTransformFlagBitsKHR preTransform = + ( surfaceCapabilities.supportedTransforms & vk::SurfaceTransformFlagBitsKHR::eIdentity ) + ? vk::SurfaceTransformFlagBitsKHR::eIdentity + : surfaceCapabilities.currentTransform; vk::CompositeAlphaFlagBitsKHR compositeAlpha = - (surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::ePreMultiplied) ? vk::CompositeAlphaFlagBitsKHR::ePreMultiplied : - (surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::ePostMultiplied) ? vk::CompositeAlphaFlagBitsKHR::ePostMultiplied : - (surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::eInherit) ? 
vk::CompositeAlphaFlagBitsKHR::eInherit : vk::CompositeAlphaFlagBitsKHR::eOpaque; - vk::PresentModeKHR presentMode = vk::su::pickPresentMode(physicalDevice.getSurfacePresentModesKHR(surface)); - vk::SwapchainCreateInfoKHR swapChainCreateInfo({}, surface, surfaceCapabilities.minImageCount, colorFormat, surfaceFormat.colorSpace, swapchainExtent, 1, usage, vk::SharingMode::eExclusive, - 0, nullptr, preTransform, compositeAlpha, presentMode, true, *oldSwapChain); - if (graphicsQueueFamilyIndex != presentQueueFamilyIndex) + ( surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::ePreMultiplied ) + ? vk::CompositeAlphaFlagBitsKHR::ePreMultiplied + : ( surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::ePostMultiplied ) + ? vk::CompositeAlphaFlagBitsKHR::ePostMultiplied + : ( surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::eInherit ) + ? vk::CompositeAlphaFlagBitsKHR::eInherit + : vk::CompositeAlphaFlagBitsKHR::eOpaque; + vk::PresentModeKHR presentMode = vk::su::pickPresentMode( physicalDevice.getSurfacePresentModesKHR( surface ) ); + vk::SwapchainCreateInfoKHR swapChainCreateInfo( {}, + surface, + surfaceCapabilities.minImageCount, + colorFormat, + surfaceFormat.colorSpace, + swapchainExtent, + 1, + usage, + vk::SharingMode::eExclusive, + 0, + nullptr, + preTransform, + compositeAlpha, + presentMode, + true, + *oldSwapChain ); + if ( graphicsQueueFamilyIndex != presentQueueFamilyIndex ) { uint32_t queueFamilyIndices[2] = { graphicsQueueFamilyIndex, presentQueueFamilyIndex }; - // If the graphics and present queues are from different queue families, we either have to explicitly transfer ownership of images between - // the queues, or we have to create the swapchain with imageSharingMode as vk::SharingMode::eConcurrent - swapChainCreateInfo.imageSharingMode = vk::SharingMode::eConcurrent; + // If the graphics and present queues are from different queue families, we either have to explicitly transfer + // ownership of images between the queues, or we have to create the swapchain with imageSharingMode as + // vk::SharingMode::eConcurrent + swapChainCreateInfo.imageSharingMode = vk::SharingMode::eConcurrent; swapChainCreateInfo.queueFamilyIndexCount = 2; - swapChainCreateInfo.pQueueFamilyIndices = queueFamilyIndices; + swapChainCreateInfo.pQueueFamilyIndices = queueFamilyIndices; } - swapChain = device->createSwapchainKHRUnique(swapChainCreateInfo); + swapChain = device->createSwapchainKHRUnique( swapChainCreateInfo ); - images = device->getSwapchainImagesKHR(swapChain.get()); + images = device->getSwapchainImagesKHR( swapChain.get() ); - imageViews.reserve(images.size()); - vk::ComponentMapping componentMapping(vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eG, vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eA); - vk::ImageSubresourceRange subResourceRange(vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1); - for (auto image : images) + imageViews.reserve( images.size() ); + vk::ComponentMapping componentMapping( + vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eG, vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eA ); + vk::ImageSubresourceRange subResourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ); + for ( auto image : images ) { - vk::ImageViewCreateInfo imageViewCreateInfo(vk::ImageViewCreateFlags(), image, vk::ImageViewType::e2D, colorFormat, componentMapping, subResourceRange); - imageViews.push_back(device->createImageViewUnique(imageViewCreateInfo)); + vk::ImageViewCreateInfo imageViewCreateInfo( + 
vk::ImageViewCreateFlags(), image, vk::ImageViewType::e2D, colorFormat, componentMapping, subResourceRange ); + imageViews.push_back( device->createImageViewUnique( imageViewCreateInfo ) ); } } - CheckerboardImageGenerator::CheckerboardImageGenerator(std::array const& rgb0, std::array const& rgb1) - : m_rgb0(rgb0) - , m_rgb1(rgb1) + CheckerboardImageGenerator::CheckerboardImageGenerator( std::array const & rgb0, + std::array const & rgb1 ) + : m_rgb0( rgb0 ), m_rgb1( rgb1 ) {} - void CheckerboardImageGenerator::operator()(void* data, vk::Extent2D &extent) const + void CheckerboardImageGenerator::operator()( void * data, vk::Extent2D & extent ) const { // Checkerboard of 16x16 pixel squares - uint8_t *pImageMemory = static_cast(data); - for (uint32_t row = 0; row < extent.height; row++) + uint8_t * pImageMemory = static_cast( data ); + for ( uint32_t row = 0; row < extent.height; row++ ) { - for (uint32_t col = 0; col < extent.width; col++) + for ( uint32_t col = 0; col < extent.width; col++ ) { - std::array const& rgb = (((row & 0x10) == 0) ^ ((col & 0x10) == 0)) ? m_rgb1 : m_rgb0; - pImageMemory[0] = rgb[0]; - pImageMemory[1] = rgb[1]; - pImageMemory[2] = rgb[2]; - pImageMemory[3] = 255; + std::array const & rgb = ( ( ( row & 0x10 ) == 0 ) ^ ( ( col & 0x10 ) == 0 ) ) ? m_rgb1 : m_rgb0; + pImageMemory[0] = rgb[0]; + pImageMemory[1] = rgb[1]; + pImageMemory[2] = rgb[2]; + pImageMemory[3] = 255; pImageMemory += 4; } } } - MonochromeImageGenerator::MonochromeImageGenerator(std::array const& rgb) - : m_rgb(rgb) - {} + MonochromeImageGenerator::MonochromeImageGenerator( std::array const & rgb ) : m_rgb( rgb ) {} - void MonochromeImageGenerator::operator()(void* data, vk::Extent2D &extent) const + void MonochromeImageGenerator::operator()( void * data, vk::Extent2D & extent ) const { // fill in with the monochrome color - unsigned char *pImageMemory = static_cast(data); - for (uint32_t row = 0; row < extent.height; row++) + unsigned char * pImageMemory = static_cast( data ); + for ( uint32_t row = 0; row < extent.height; row++ ) { - for (uint32_t col = 0; col < extent.width; col++) + for ( uint32_t col = 0; col < extent.width; col++ ) { pImageMemory[0] = m_rgb[0]; pImageMemory[1] = m_rgb[1]; @@ -774,93 +1056,112 @@ namespace vk } } - PixelsImageGenerator::PixelsImageGenerator(vk::Extent2D const& extent, size_t channels, unsigned char const* pixels) - : m_extent(extent) - , m_channels(channels) - , m_pixels(pixels) + PixelsImageGenerator::PixelsImageGenerator( vk::Extent2D const & extent, + size_t channels, + unsigned char const * pixels ) + : m_extent( extent ), m_channels( channels ), m_pixels( pixels ) { - assert(m_channels == 4); + assert( m_channels == 4 ); } - void PixelsImageGenerator::operator()(void* data, vk::Extent2D & extent) const + void PixelsImageGenerator::operator()( void * data, vk::Extent2D & extent ) const { - assert(extent == m_extent); - memcpy(data, m_pixels, m_extent.width * m_extent.height * m_channels); + assert( extent == m_extent ); + memcpy( data, m_pixels, m_extent.width * m_extent.height * m_channels ); } - TextureData::TextureData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::Extent2D const& extent_, vk::ImageUsageFlags usageFlags, - vk::FormatFeatureFlags formatFeatureFlags, bool anisotropyEnable, bool forceStaging) - : format(vk::Format::eR8G8B8A8Unorm) - , extent(extent_) + TextureData::TextureData( vk::PhysicalDevice const & physicalDevice, + vk::UniqueDevice const & device, + vk::Extent2D const & extent_, + vk::ImageUsageFlags 
usageFlags, + vk::FormatFeatureFlags formatFeatureFlags, + bool anisotropyEnable, + bool forceStaging ) + : format( vk::Format::eR8G8B8A8Unorm ), extent( extent_ ) { vk::PhysicalDeviceMemoryProperties memoryProperties = physicalDevice.getMemoryProperties(); - vk::FormatProperties formatProperties = physicalDevice.getFormatProperties(format); + vk::FormatProperties formatProperties = physicalDevice.getFormatProperties( format ); formatFeatureFlags |= vk::FormatFeatureFlagBits::eSampledImage; - needsStaging = forceStaging || ((formatProperties.linearTilingFeatures & formatFeatureFlags) != formatFeatureFlags); - vk::ImageTiling imageTiling; - vk::ImageLayout initialLayout; + needsStaging = + forceStaging || ( ( formatProperties.linearTilingFeatures & formatFeatureFlags ) != formatFeatureFlags ); + vk::ImageTiling imageTiling; + vk::ImageLayout initialLayout; vk::MemoryPropertyFlags requirements; - if (needsStaging) + if ( needsStaging ) { - assert((formatProperties.optimalTilingFeatures & formatFeatureFlags) == formatFeatureFlags); - stagingBufferData = std::unique_ptr(new BufferData(physicalDevice, device, extent.width * extent.height * 4, vk::BufferUsageFlagBits::eTransferSrc)); - imageTiling = vk::ImageTiling::eOptimal; + assert( ( formatProperties.optimalTilingFeatures & formatFeatureFlags ) == formatFeatureFlags ); + stagingBufferData = std::unique_ptr( new BufferData( + physicalDevice, device, extent.width * extent.height * 4, vk::BufferUsageFlagBits::eTransferSrc ) ); + imageTiling = vk::ImageTiling::eOptimal; usageFlags |= vk::ImageUsageFlagBits::eTransferDst; initialLayout = vk::ImageLayout::eUndefined; } else { - imageTiling = vk::ImageTiling::eLinear; + imageTiling = vk::ImageTiling::eLinear; initialLayout = vk::ImageLayout::ePreinitialized; - requirements = vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostVisible; + requirements = vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostVisible; } - imageData = std::unique_ptr(new ImageData(physicalDevice, device, format, extent, imageTiling, usageFlags | vk::ImageUsageFlagBits::eSampled, initialLayout, requirements, - vk::ImageAspectFlagBits::eColor)); + imageData = std::unique_ptr( new ImageData( physicalDevice, + device, + format, + extent, + imageTiling, + usageFlags | vk::ImageUsageFlagBits::eSampled, + initialLayout, + requirements, + vk::ImageAspectFlagBits::eColor ) ); - textureSampler = device->createSamplerUnique(vk::SamplerCreateInfo(vk::SamplerCreateFlags(), vk::Filter::eLinear, vk::Filter::eLinear, vk::SamplerMipmapMode::eLinear, - vk::SamplerAddressMode::eRepeat, vk::SamplerAddressMode::eRepeat, vk::SamplerAddressMode::eRepeat, 0.0f, anisotropyEnable, - 16.0f, false, vk::CompareOp::eNever, 0.0f, 0.0f, vk::BorderColor::eFloatOpaqueBlack)); + textureSampler = device->createSamplerUnique( vk::SamplerCreateInfo( vk::SamplerCreateFlags(), + vk::Filter::eLinear, + vk::Filter::eLinear, + vk::SamplerMipmapMode::eLinear, + vk::SamplerAddressMode::eRepeat, + vk::SamplerAddressMode::eRepeat, + vk::SamplerAddressMode::eRepeat, + 0.0f, + anisotropyEnable, + 16.0f, + false, + vk::CompareOp::eNever, + 0.0f, + 0.0f, + vk::BorderColor::eFloatOpaqueBlack ) ); } - UUID::UUID(uint8_t const data[VK_UUID_SIZE]) + UUID::UUID( uint8_t const data[VK_UUID_SIZE] ) { - memcpy(m_data, data, VK_UUID_SIZE * sizeof(uint8_t)); + memcpy( m_data, data, VK_UUID_SIZE * sizeof( uint8_t ) ); } - WindowData::WindowData(GLFWwindow *wnd, std::string const &name, vk::Extent2D const &extent) - : handle{wnd} - , 
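// --------------------------------------------------------------------------------------------
// Aside (not part of the patch): TextureData above only goes through a staging buffer when the
// format cannot be sampled with linear tiling (or when staging is forced).  That decision in
// isolation, as a sketch that ignores the extra caller-supplied feature flags for brevity:
#include <vulkan/vulkan.hpp>

bool needsStagingBuffer( vk::PhysicalDevice physicalDevice, vk::Format format, bool forceStaging )
{
  vk::FormatProperties   props    = physicalDevice.getFormatProperties( format );
  vk::FormatFeatureFlags required = vk::FormatFeatureFlagBits::eSampledImage;
  // if linear tiling cannot provide every required feature, upload via a staging buffer into an
  // optimally tiled image instead
  return forceStaging || ( ( props.linearTilingFeatures & required ) != required );
}
// --------------------------------------------------------------------------------------------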
name{name} - , extent{extent} - { - } + WindowData::WindowData( GLFWwindow * wnd, std::string const & name, vk::Extent2D const & extent ) + : handle{ wnd }, name{ name }, extent{ extent } + {} - WindowData::WindowData(WindowData &&other) - : handle{} - , name{} - , extent{} + WindowData::WindowData( WindowData && other ) : handle{}, name{}, extent{} { - std::swap(handle, other.handle); - std::swap(name, other.name); - std::swap(extent, other.extent); + std::swap( handle, other.handle ); + std::swap( name, other.name ); + std::swap( extent, other.extent ); } WindowData::~WindowData() noexcept { - glfwDestroyWindow(handle); + glfwDestroyWindow( handle ); } - WindowData createWindow(std::string const &windowName, vk::Extent2D const &extent) + WindowData createWindow( std::string const & windowName, vk::Extent2D const & extent ) { struct glfwContext { glfwContext() { glfwInit(); - glfwSetErrorCallback([](int error, const char *msg) { + glfwSetErrorCallback( []( int error, const char * msg ) { std::cerr << "glfw: " << "(" << error << ") " << msg << std::endl; - }); + } ); } ~glfwContext() @@ -872,24 +1173,24 @@ namespace vk static auto glfwCtx = glfwContext(); (void)glfwCtx; - glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API); - GLFWwindow *window = glfwCreateWindow(extent.width, extent.height, windowName.c_str(), nullptr, nullptr); - return WindowData(window, windowName, extent); + glfwWindowHint( GLFW_CLIENT_API, GLFW_NO_API ); + GLFWwindow * window = glfwCreateWindow( extent.width, extent.height, windowName.c_str(), nullptr, nullptr ); + return WindowData( window, windowName, extent ); } - } -} + } // namespace su +} // namespace vk -std::ostream& operator<<(std::ostream& os, vk::su::UUID const& uuid) +std::ostream & operator<<( std::ostream & os, vk::su::UUID const & uuid ) { - os << std::setfill('0') << std::hex; - for (int j = 0; j < VK_UUID_SIZE; ++j) + os << std::setfill( '0' ) << std::hex; + for ( int j = 0; j < VK_UUID_SIZE; ++j ) { - os << std::setw(2) << static_cast(uuid.m_data[j]); - if (j == 3 || j == 5 || j == 7 || j == 9) + os << std::setw( 2 ) << static_cast( uuid.m_data[j] ); + if ( j == 3 || j == 5 || j == 7 || j == 9 ) { std::cout << '-'; } } - os << std::setfill(' ') << std::dec; + os << std::setfill( ' ' ) << std::dec; return os; } diff --git a/samples/utils/utils.hpp b/samples/utils/utils.hpp index b53b52d..5b227c1 100644 --- a/samples/utils/utils.hpp +++ b/samples/utils/utils.hpp @@ -19,7 +19,6 @@ #define GLFW_INCLUDE_NONE #include - #include #include @@ -30,121 +29,146 @@ namespace vk const uint64_t FenceTimeout = 100000000; template - void oneTimeSubmit(vk::UniqueCommandBuffer const& commandBuffer, vk::Queue const& queue, Func const& func) + void oneTimeSubmit( vk::UniqueCommandBuffer const & commandBuffer, vk::Queue const & queue, Func const & func ) { - commandBuffer->begin(vk::CommandBufferBeginInfo(vk::CommandBufferUsageFlagBits::eOneTimeSubmit)); - func(commandBuffer); + commandBuffer->begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) ); + func( commandBuffer ); commandBuffer->end(); - queue.submit(vk::SubmitInfo(0, nullptr, nullptr, 1, &(*commandBuffer)), nullptr); + queue.submit( vk::SubmitInfo( 0, nullptr, nullptr, 1, &( *commandBuffer ) ), nullptr ); queue.waitIdle(); } template - void oneTimeSubmit(vk::UniqueDevice const& device, vk::UniqueCommandPool const& commandPool, vk::Queue const& queue, Func const& func) + void oneTimeSubmit( vk::UniqueDevice const & device, + vk::UniqueCommandPool const & commandPool, + vk::Queue const & 
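// --------------------------------------------------------------------------------------------
// Aside (not part of the patch): operator<< above prints the 16 UUID bytes as hex with dashes
// after bytes 3, 5, 7 and 9 (the usual 8-4-4-4-12 grouping).  Note that the separator there is
// written to std::cout rather than to the passed stream; that is pre-existing sample behaviour,
// untouched by this formatting-only change.  A sketch that writes everything to the target
// stream (printUuid is a hypothetical name; 16 == VK_UUID_SIZE):
#include <cstdint>
#include <iomanip>
#include <ostream>

std::ostream & printUuid( std::ostream & os, uint8_t const ( &data )[16] )
{
  os << std::hex << std::setfill( '0' );
  for ( int i = 0; i < 16; ++i )
  {
    os << std::setw( 2 ) << static_cast<uint32_t>( data[i] );
    if ( i == 3 || i == 5 || i == 7 || i == 9 )
    {
      os << '-';  // separator goes to the same stream as the digits
    }
  }
  return os << std::dec << std::setfill( ' ' );
}
// --------------------------------------------------------------------------------------------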
queue, + Func const & func ) { - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(*commandPool, vk::CommandBufferLevel::ePrimary, 1)).front()); - oneTimeSubmit(commandBuffer, queue, func); + vk::UniqueCommandBuffer commandBuffer = + std::move( device + ->allocateCommandBuffersUnique( + vk::CommandBufferAllocateInfo( *commandPool, vk::CommandBufferLevel::ePrimary, 1 ) ) + .front() ); + oneTimeSubmit( commandBuffer, queue, func ); } template - void copyToDevice(vk::UniqueDevice const& device, vk::UniqueDeviceMemory const& memory, T const* pData, size_t count, vk::DeviceSize stride = sizeof(T)) + void copyToDevice( vk::UniqueDevice const & device, + vk::UniqueDeviceMemory const & memory, + T const * pData, + size_t count, + vk::DeviceSize stride = sizeof( T ) ) { - assert(sizeof(T) <= stride); - uint8_t* deviceData = static_cast(device->mapMemory(memory.get(), 0, count * stride)); - if (stride == sizeof(T)) + assert( sizeof( T ) <= stride ); + uint8_t * deviceData = static_cast( device->mapMemory( memory.get(), 0, count * stride ) ); + if ( stride == sizeof( T ) ) { - memcpy(deviceData, pData, count * sizeof(T)); + memcpy( deviceData, pData, count * sizeof( T ) ); } else { - for (size_t i = 0; i < count; i++) + for ( size_t i = 0; i < count; i++ ) { - memcpy(deviceData, &pData[i], sizeof(T)); + memcpy( deviceData, &pData[i], sizeof( T ) ); deviceData += stride; } } - device->unmapMemory(memory.get()); + device->unmapMemory( memory.get() ); } template - void copyToDevice(vk::UniqueDevice const& device, vk::UniqueDeviceMemory const& memory, T const& data) + void copyToDevice( vk::UniqueDevice const & device, vk::UniqueDeviceMemory const & memory, T const & data ) { - copyToDevice(device, memory, &data, 1); + copyToDevice( device, memory, &data, 1 ); } - template - VULKAN_HPP_INLINE constexpr const T& clamp(const T& v, const T& lo, const T& hi) + template + VULKAN_HPP_INLINE constexpr const T & clamp( const T & v, const T & lo, const T & hi ) { return v < lo ? lo : hi < v ? 
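// --------------------------------------------------------------------------------------------
// Aside (not part of the patch): copyToDevice above handles the case where the device-side
// stride is larger than sizeof(T) -- e.g. dynamic uniform buffers with alignment padding -- by
// copying element by element.  The same idea on plain memory, with hypothetical names:
#include <cassert>
#include <cstddef>
#include <cstring>

template <typename T>
void copyStrided( void * dst, T const * src, size_t count, size_t stride )
{
  assert( sizeof( T ) <= stride );
  unsigned char * out = static_cast<unsigned char *>( dst );
  for ( size_t i = 0; i < count; i++ )
  {
    std::memcpy( out, &src[i], sizeof( T ) );  // the padding bytes between elements stay untouched
    out += stride;
  }
}
// usage: pack four floats into a buffer that uses a 16-byte element stride
//   float values[4] = { 1.f, 2.f, 3.f, 4.f };
//   copyStrided( buffer.data(), values, 4, 16 );   // buffer holds at least 4 * 16 bytes
// --------------------------------------------------------------------------------------------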
hi : v; } - void setImageLayout(vk::UniqueCommandBuffer const &commandBuffer, vk::Image image, vk::Format format, vk::ImageLayout oldImageLayout, vk::ImageLayout newImageLayout); + void setImageLayout( vk::UniqueCommandBuffer const & commandBuffer, + vk::Image image, + vk::Format format, + vk::ImageLayout oldImageLayout, + vk::ImageLayout newImageLayout ); struct WindowData { - WindowData(GLFWwindow *wnd, std::string const& name, vk::Extent2D const& extent); - WindowData(const WindowData &) = delete; - WindowData(WindowData &&other); + WindowData( GLFWwindow * wnd, std::string const & name, vk::Extent2D const & extent ); + WindowData( const WindowData & ) = delete; + WindowData( WindowData && other ); ~WindowData() noexcept; - GLFWwindow *handle; - std::string name; + GLFWwindow * handle; + std::string name; vk::Extent2D extent; }; - WindowData createWindow(std::string const &windowName, vk::Extent2D const &extent); + WindowData createWindow( std::string const & windowName, vk::Extent2D const & extent ); struct BufferData { - BufferData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::DeviceSize size, vk::BufferUsageFlags usage, - vk::MemoryPropertyFlags propertyFlags = vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); + BufferData( vk::PhysicalDevice const & physicalDevice, + vk::UniqueDevice const & device, + vk::DeviceSize size, + vk::BufferUsageFlags usage, + vk::MemoryPropertyFlags propertyFlags = vk::MemoryPropertyFlagBits::eHostVisible | + vk::MemoryPropertyFlagBits::eHostCoherent ); template - void upload(vk::UniqueDevice const& device, DataType const& data) const + void upload( vk::UniqueDevice const & device, DataType const & data ) const { - assert((m_propertyFlags & vk::MemoryPropertyFlagBits::eHostCoherent) && (m_propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible)); - assert(sizeof(DataType) <= m_size); + assert( ( m_propertyFlags & vk::MemoryPropertyFlagBits::eHostCoherent ) && + ( m_propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible ) ); + assert( sizeof( DataType ) <= m_size ); - void* dataPtr = device->mapMemory(*this->deviceMemory, 0, sizeof(DataType)); - memcpy(dataPtr, &data, sizeof(DataType)); - device->unmapMemory(*this->deviceMemory); + void * dataPtr = device->mapMemory( *this->deviceMemory, 0, sizeof( DataType ) ); + memcpy( dataPtr, &data, sizeof( DataType ) ); + device->unmapMemory( *this->deviceMemory ); } template - void upload(vk::UniqueDevice const& device, std::vector const& data, size_t stride = 0) const + void upload( vk::UniqueDevice const & device, std::vector const & data, size_t stride = 0 ) const { - assert(m_propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible); + assert( m_propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible ); - size_t elementSize = stride ? stride : sizeof(DataType); - assert(sizeof(DataType) <= elementSize); + size_t elementSize = stride ? 
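// --------------------------------------------------------------------------------------------
// Aside (not part of the patch): a typical use of BufferData and its upload() above is a small
// host-visible uniform buffer.  A sketch built only from the vk::su helpers declared in this
// header; the include path and the Mvp struct are assumptions, and a working Vulkan driver is
// required at runtime:
#include "utils.hpp"  // samples/utils/utils.hpp -- adjust the path to your build setup

int main()
{
  vk::UniqueInstance instance       = vk::su::createInstance( "UploadSketch", "Vulkan.hpp" );
  vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front();
  uint32_t           queueFamilyIndex =
    vk::su::findGraphicsQueueFamilyIndex( physicalDevice.getQueueFamilyProperties() );
  vk::UniqueDevice device = vk::su::createDevice( physicalDevice, queueFamilyIndex );

  struct Mvp
  {
    float matrix[16];
  } mvp = {};

  // defaults to eHostVisible | eHostCoherent memory, so upload() can simply map and memcpy
  vk::su::BufferData uniformBuffer(
    physicalDevice, device, sizeof( Mvp ), vk::BufferUsageFlagBits::eUniformBuffer );
  uniformBuffer.upload( device, mvp );
  return 0;
}
// --------------------------------------------------------------------------------------------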
stride : sizeof( DataType ); + assert( sizeof( DataType ) <= elementSize ); - copyToDevice(device, deviceMemory, data.data(), data.size(), elementSize); + copyToDevice( device, deviceMemory, data.data(), data.size(), elementSize ); } template - void upload(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::UniqueCommandPool const& commandPool, vk::Queue queue, std::vector const& data, - size_t stride) const + void upload( vk::PhysicalDevice const & physicalDevice, + vk::UniqueDevice const & device, + vk::UniqueCommandPool const & commandPool, + vk::Queue queue, + std::vector const & data, + size_t stride ) const { - assert(m_usage & vk::BufferUsageFlagBits::eTransferDst); - assert(m_propertyFlags & vk::MemoryPropertyFlagBits::eDeviceLocal); + assert( m_usage & vk::BufferUsageFlagBits::eTransferDst ); + assert( m_propertyFlags & vk::MemoryPropertyFlagBits::eDeviceLocal ); - size_t elementSize = stride ? stride : sizeof(DataType); - assert(sizeof(DataType) <= elementSize); + size_t elementSize = stride ? stride : sizeof( DataType ); + assert( sizeof( DataType ) <= elementSize ); size_t dataSize = data.size() * elementSize; - assert(dataSize <= m_size); + assert( dataSize <= m_size ); - vk::su::BufferData stagingBuffer(physicalDevice, device, dataSize, vk::BufferUsageFlagBits::eTransferSrc); - copyToDevice(device, stagingBuffer.deviceMemory, data.data(), data.size(), elementSize); + vk::su::BufferData stagingBuffer( physicalDevice, device, dataSize, vk::BufferUsageFlagBits::eTransferSrc ); + copyToDevice( device, stagingBuffer.deviceMemory, data.data(), data.size(), elementSize ); - vk::su::oneTimeSubmit(device, commandPool, queue, - [&](vk::UniqueCommandBuffer const& commandBuffer) { commandBuffer->copyBuffer(*stagingBuffer.buffer, *this->buffer, vk::BufferCopy(0, 0, dataSize)); }); + vk::su::oneTimeSubmit( device, commandPool, queue, [&]( vk::UniqueCommandBuffer const & commandBuffer ) { + commandBuffer->copyBuffer( *stagingBuffer.buffer, *this->buffer, vk::BufferCopy( 0, 0, dataSize ) ); + } ); } - vk::UniqueBuffer buffer; - vk::UniqueDeviceMemory deviceMemory; -#if !defined(NDEBUG) - private: + vk::UniqueBuffer buffer; + vk::UniqueDeviceMemory deviceMemory; +#if !defined( NDEBUG ) + private: vk::DeviceSize m_size; vk::BufferUsageFlags m_usage; vk::MemoryPropertyFlags m_propertyFlags; @@ -153,104 +177,149 @@ namespace vk struct ImageData { - ImageData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::Format format, vk::Extent2D const& extent, vk::ImageTiling tiling, vk::ImageUsageFlags usage - , vk::ImageLayout initialLayout, vk::MemoryPropertyFlags memoryProperties, vk::ImageAspectFlags aspectMask); + ImageData( vk::PhysicalDevice const & physicalDevice, + vk::UniqueDevice const & device, + vk::Format format, + vk::Extent2D const & extent, + vk::ImageTiling tiling, + vk::ImageUsageFlags usage, + vk::ImageLayout initialLayout, + vk::MemoryPropertyFlags memoryProperties, + vk::ImageAspectFlags aspectMask ); - vk::Format format; - vk::UniqueImage image; - vk::UniqueDeviceMemory deviceMemory; - vk::UniqueImageView imageView; + vk::Format format; + vk::UniqueImage image; + vk::UniqueDeviceMemory deviceMemory; + vk::UniqueImageView imageView; }; struct DepthBufferData : public ImageData { - DepthBufferData(vk::PhysicalDevice &physicalDevice, vk::UniqueDevice & device, vk::Format format, vk::Extent2D const& extent); + DepthBufferData( vk::PhysicalDevice & physicalDevice, + vk::UniqueDevice & device, + vk::Format format, + vk::Extent2D const 
& extent ); }; struct SurfaceData { - SurfaceData(vk::UniqueInstance &instance, std::string const& windowName, vk::Extent2D const& extent); + SurfaceData( vk::UniqueInstance & instance, std::string const & windowName, vk::Extent2D const & extent ); - vk::Extent2D extent; - WindowData window; - vk::UniqueSurfaceKHR surface; + vk::Extent2D extent; + WindowData window; + vk::UniqueSurfaceKHR surface; }; struct SwapChainData { - SwapChainData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::SurfaceKHR const& surface, vk::Extent2D const& extent, vk::ImageUsageFlags usage, - vk::UniqueSwapchainKHR const& oldSwapChain, uint32_t graphicsFamilyIndex, uint32_t presentFamilyIndex); + SwapChainData( vk::PhysicalDevice const & physicalDevice, + vk::UniqueDevice const & device, + vk::SurfaceKHR const & surface, + vk::Extent2D const & extent, + vk::ImageUsageFlags usage, + vk::UniqueSwapchainKHR const & oldSwapChain, + uint32_t graphicsFamilyIndex, + uint32_t presentFamilyIndex ); - vk::Format colorFormat; - vk::UniqueSwapchainKHR swapChain; - std::vector images; - std::vector imageViews; + vk::Format colorFormat; + vk::UniqueSwapchainKHR swapChain; + std::vector images; + std::vector imageViews; }; class CheckerboardImageGenerator { public: - CheckerboardImageGenerator(std::array const& rgb0 = {0, 0, 0}, std::array const& rgb1 = {255, 255, 255}); + CheckerboardImageGenerator( std::array const & rgb0 = { 0, 0, 0 }, + std::array const & rgb1 = { 255, 255, 255 } ); - void operator()(void* data, vk::Extent2D &extent) const; + void operator()( void * data, vk::Extent2D & extent ) const; private: - std::array const& m_rgb0; - std::array const& m_rgb1; + std::array const & m_rgb0; + std::array const & m_rgb1; }; class MonochromeImageGenerator { - public: - MonochromeImageGenerator(std::array const& rgb); + public: + MonochromeImageGenerator( std::array const & rgb ); - void operator()(void* data, vk::Extent2D &extent) const; + void operator()( void * data, vk::Extent2D & extent ) const; - private: - std::array const& m_rgb; + private: + std::array const & m_rgb; }; class PixelsImageGenerator { - public: - PixelsImageGenerator(vk::Extent2D const& extent, size_t channels, unsigned char const* pixels); + public: + PixelsImageGenerator( vk::Extent2D const & extent, size_t channels, unsigned char const * pixels ); - void operator()(void* data, vk::Extent2D & extent) const; + void operator()( void * data, vk::Extent2D & extent ) const; - private: + private: vk::Extent2D m_extent; size_t m_channels; - unsigned char const* m_pixels; + unsigned char const * m_pixels; }; - struct TextureData { - TextureData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::Extent2D const& extent_ = {256, 256}, vk::ImageUsageFlags usageFlags = {}, - vk::FormatFeatureFlags formatFeatureFlags = {}, bool anisotropyEnable = false, bool forceStaging = false); + TextureData( vk::PhysicalDevice const & physicalDevice, + vk::UniqueDevice const & device, + vk::Extent2D const & extent_ = { 256, 256 }, + vk::ImageUsageFlags usageFlags = {}, + vk::FormatFeatureFlags formatFeatureFlags = {}, + bool anisotropyEnable = false, + bool forceStaging = false ); template - void setImage(vk::UniqueDevice const& device, vk::UniqueCommandBuffer const& commandBuffer, ImageGenerator const& imageGenerator) + void setImage( vk::UniqueDevice const & device, + vk::UniqueCommandBuffer const & commandBuffer, + ImageGenerator const & imageGenerator ) { - void* data = needsStaging - ? 
device->mapMemory(stagingBufferData->deviceMemory.get(), 0, device->getBufferMemoryRequirements(stagingBufferData->buffer.get()).size) - : device->mapMemory(imageData->deviceMemory.get(), 0, device->getImageMemoryRequirements(imageData->image.get()).size); - imageGenerator(data, extent); - device->unmapMemory(needsStaging ? stagingBufferData->deviceMemory.get() : imageData->deviceMemory.get()); + void * data = + needsStaging + ? device->mapMemory( stagingBufferData->deviceMemory.get(), + 0, + device->getBufferMemoryRequirements( stagingBufferData->buffer.get() ).size ) + : device->mapMemory( + imageData->deviceMemory.get(), 0, device->getImageMemoryRequirements( imageData->image.get() ).size ); + imageGenerator( data, extent ); + device->unmapMemory( needsStaging ? stagingBufferData->deviceMemory.get() : imageData->deviceMemory.get() ); - if (needsStaging) + if ( needsStaging ) { // Since we're going to blit to the texture image, set its layout to eTransferDstOptimal - vk::su::setImageLayout(commandBuffer, imageData->image.get(), imageData->format, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal); - vk::BufferImageCopy copyRegion(0, extent.width, extent.height, vk::ImageSubresourceLayers(vk::ImageAspectFlagBits::eColor, 0, 0, 1), vk::Offset3D(0, 0, 0), vk::Extent3D(extent, 1)); - commandBuffer->copyBufferToImage(stagingBufferData->buffer.get(), imageData->image.get(), vk::ImageLayout::eTransferDstOptimal, copyRegion); + vk::su::setImageLayout( commandBuffer, + imageData->image.get(), + imageData->format, + vk::ImageLayout::eUndefined, + vk::ImageLayout::eTransferDstOptimal ); + vk::BufferImageCopy copyRegion( 0, + extent.width, + extent.height, + vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ), + vk::Offset3D( 0, 0, 0 ), + vk::Extent3D( extent, 1 ) ); + commandBuffer->copyBufferToImage( + stagingBufferData->buffer.get(), imageData->image.get(), vk::ImageLayout::eTransferDstOptimal, copyRegion ); // Set the layout for the texture image from eTransferDstOptimal to SHADER_READ_ONLY - vk::su::setImageLayout(commandBuffer, imageData->image.get(), imageData->format, vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal); + vk::su::setImageLayout( commandBuffer, + imageData->image.get(), + imageData->format, + vk::ImageLayout::eTransferDstOptimal, + vk::ImageLayout::eShaderReadOnlyOptimal ); } else { // If we can use the linear tiled image as a texture, just do it - vk::su::setImageLayout(commandBuffer, imageData->image.get(), imageData->format, vk::ImageLayout::ePreinitialized, vk::ImageLayout::eShaderReadOnlyOptimal); + vk::su::setImageLayout( commandBuffer, + imageData->image.get(), + imageData->format, + vk::ImageLayout::ePreinitialized, + vk::ImageLayout::eShaderReadOnlyOptimal ); } } @@ -264,59 +333,100 @@ namespace vk struct UUID { - public: - UUID(uint8_t const data[VK_UUID_SIZE]); + public: + UUID( uint8_t const data[VK_UUID_SIZE] ); uint8_t m_data[VK_UUID_SIZE]; }; - template - VULKAN_HPP_INLINE TargetType checked_cast(SourceType value) + VULKAN_HPP_INLINE TargetType checked_cast( SourceType value ) { - static_assert(sizeof(TargetType) <= sizeof(SourceType), "No need to cast from smaller to larger type!"); - static_assert(!std::numeric_limits::is_signed, "Only unsigned types supported!"); - static_assert(!std::numeric_limits::is_signed, "Only unsigned types supported!"); - assert(value <= std::numeric_limits::max()); - return static_cast(value); + static_assert( sizeof( TargetType ) <= sizeof( SourceType ), "No need to 
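// --------------------------------------------------------------------------------------------
// Aside (not part of the patch): the setImageLayout calls above insert image memory barriers.
// One typical encoding of the eUndefined -> eTransferDstOptimal transition used before the
// copyBufferToImage (the real vk::su::setImageLayout derives access masks and stages from the
// old/new layouts; this sketch spells out just this one case):
#include <vulkan/vulkan.hpp>

void transitionToTransferDst( vk::UniqueCommandBuffer const & commandBuffer, vk::Image image )
{
  vk::ImageMemoryBarrier barrier(
    {},                                  // srcAccessMask: nothing to wait for on an undefined image
    vk::AccessFlagBits::eTransferWrite,  // dstAccessMask: the upcoming copy writes the image
    vk::ImageLayout::eUndefined,
    vk::ImageLayout::eTransferDstOptimal,
    VK_QUEUE_FAMILY_IGNORED,
    VK_QUEUE_FAMILY_IGNORED,
    image,
    vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  commandBuffer->pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe,
                                  vk::PipelineStageFlagBits::eTransfer,
                                  {},        // no dependency flags
                                  nullptr,   // no global memory barriers
                                  nullptr,   // no buffer memory barriers
                                  barrier );
}
// --------------------------------------------------------------------------------------------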
cast from smaller to larger type!" ); + static_assert( !std::numeric_limits::is_signed, "Only unsigned types supported!" ); + static_assert( !std::numeric_limits::is_signed, "Only unsigned types supported!" ); + assert( value <= std::numeric_limits::max() ); + return static_cast( value ); } - vk::UniqueDeviceMemory allocateMemory(vk::UniqueDevice const& device, vk::PhysicalDeviceMemoryProperties const& memoryProperties, vk::MemoryRequirements const& memoryRequirements, - vk::MemoryPropertyFlags memoryPropertyFlags); - bool contains(std::vector const& extensionProperties, std::string const& extensionName); - vk::UniqueCommandPool createCommandPool(vk::UniqueDevice &device, uint32_t queueFamilyIndex); - vk::UniqueDebugUtilsMessengerEXT createDebugUtilsMessenger(vk::UniqueInstance &instance); - vk::UniqueDescriptorPool createDescriptorPool(vk::UniqueDevice &device, std::vector const& poolSizes); - vk::UniqueDescriptorSetLayout createDescriptorSetLayout(vk::UniqueDevice const& device, std::vector> const& bindingData, - vk::DescriptorSetLayoutCreateFlags flags = {}); - vk::UniqueDevice createDevice(vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex, std::vector const& extensions = {}, vk::PhysicalDeviceFeatures const* physicalDeviceFeatures = nullptr, void const* pNext = nullptr); - std::vector createFramebuffers(vk::UniqueDevice &device, vk::UniqueRenderPass &renderPass, std::vector const& imageViews, vk::UniqueImageView const& depthImageView, vk::Extent2D const& extent); - vk::UniquePipeline createGraphicsPipeline(vk::UniqueDevice const& device, vk::UniquePipelineCache const& pipelineCache, - std::pair const& vertexShaderData, - std::pair const& fragmentShaderData, uint32_t vertexStride, - std::vector> const& vertexInputAttributeFormatOffset, vk::FrontFace frontFace, bool depthBuffered, - vk::UniquePipelineLayout const& pipelineLayout, vk::UniqueRenderPass const& renderPass); - vk::UniqueInstance createInstance(std::string const& appName, std::string const& engineName, std::vector const& layers = {}, std::vector const& extensions = {}, - uint32_t apiVersion = VK_API_VERSION_1_0); - vk::UniqueRenderPass createRenderPass(vk::UniqueDevice &device, vk::Format colorFormat, vk::Format depthFormat, vk::AttachmentLoadOp loadOp = vk::AttachmentLoadOp::eClear, vk::ImageLayout colorFinalLayout = vk::ImageLayout::ePresentSrcKHR); - VKAPI_ATTR VkBool32 VKAPI_CALL debugUtilsMessengerCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, VkDebugUtilsMessengerCallbackDataEXT const * pCallbackData, void * /*pUserData*/); - uint32_t findGraphicsQueueFamilyIndex(std::vector const& queueFamilyProperties); - std::pair findGraphicsAndPresentQueueFamilyIndex(vk::PhysicalDevice physicalDevice, vk::SurfaceKHR const& surface); - uint32_t findMemoryType(vk::PhysicalDeviceMemoryProperties const& memoryProperties, uint32_t typeBits, vk::MemoryPropertyFlags requirementsMask); - std::vector getDeviceExtensions(); - std::vector getInstanceExtensions(); - vk::Format pickDepthFormat(vk::PhysicalDevice const& physicalDevice); - vk::PresentModeKHR pickPresentMode(std::vector const& presentModes); - vk::SurfaceFormatKHR pickSurfaceFormat(std::vector const& formats); - void submitAndWait(vk::UniqueDevice &device, vk::Queue queue, vk::UniqueCommandBuffer &commandBuffer); - void updateDescriptorSets(vk::UniqueDevice const& device, vk::UniqueDescriptorSet const& descriptorSet, - std::vector> const& bufferData, vk::su::TextureData const& textureData, - uint32_t 
bindingOffset = 0); - void updateDescriptorSets(vk::UniqueDevice const& device, vk::UniqueDescriptorSet const& descriptorSet, - std::vector> const& bufferData, - std::vector const& textureData, uint32_t bindingOffset = 0); + vk::UniqueDeviceMemory allocateMemory( vk::UniqueDevice const & device, + vk::PhysicalDeviceMemoryProperties const & memoryProperties, + vk::MemoryRequirements const & memoryRequirements, + vk::MemoryPropertyFlags memoryPropertyFlags ); + bool contains( std::vector const & extensionProperties, + std::string const & extensionName ); + vk::UniqueCommandPool createCommandPool( vk::UniqueDevice & device, uint32_t queueFamilyIndex ); + vk::UniqueDebugUtilsMessengerEXT createDebugUtilsMessenger( vk::UniqueInstance & instance ); + vk::UniqueDescriptorPool createDescriptorPool( vk::UniqueDevice & device, + std::vector const & poolSizes ); + vk::UniqueDescriptorSetLayout createDescriptorSetLayout( + vk::UniqueDevice const & device, + std::vector> const & bindingData, + vk::DescriptorSetLayoutCreateFlags flags = {} ); + vk::UniqueDevice createDevice( vk::PhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + std::vector const & extensions = {}, + vk::PhysicalDeviceFeatures const * physicalDeviceFeatures = nullptr, + void const * pNext = nullptr ); + std::vector createFramebuffers( vk::UniqueDevice & device, + vk::UniqueRenderPass & renderPass, + std::vector const & imageViews, + vk::UniqueImageView const & depthImageView, + vk::Extent2D const & extent ); + vk::UniquePipeline + createGraphicsPipeline( vk::UniqueDevice const & device, + vk::UniquePipelineCache const & pipelineCache, + std::pair const & vertexShaderData, + std::pair const & fragmentShaderData, + uint32_t vertexStride, + std::vector> const & vertexInputAttributeFormatOffset, + vk::FrontFace frontFace, + bool depthBuffered, + vk::UniquePipelineLayout const & pipelineLayout, + vk::UniqueRenderPass const & renderPass ); + vk::UniqueInstance createInstance( std::string const & appName, + std::string const & engineName, + std::vector const & layers = {}, + std::vector const & extensions = {}, + uint32_t apiVersion = VK_API_VERSION_1_0 ); + vk::UniqueRenderPass createRenderPass( vk::UniqueDevice & device, + vk::Format colorFormat, + vk::Format depthFormat, + vk::AttachmentLoadOp loadOp = vk::AttachmentLoadOp::eClear, + vk::ImageLayout colorFinalLayout = vk::ImageLayout::ePresentSrcKHR ); + VKAPI_ATTR VkBool32 VKAPI_CALL + debugUtilsMessengerCallback( VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + VkDebugUtilsMessengerCallbackDataEXT const * pCallbackData, + void * /*pUserData*/ ); + uint32_t findGraphicsQueueFamilyIndex( std::vector const & queueFamilyProperties ); + std::pair findGraphicsAndPresentQueueFamilyIndex( vk::PhysicalDevice physicalDevice, + vk::SurfaceKHR const & surface ); + uint32_t findMemoryType( vk::PhysicalDeviceMemoryProperties const & memoryProperties, + uint32_t typeBits, + vk::MemoryPropertyFlags requirementsMask ); + std::vector getDeviceExtensions(); + std::vector getInstanceExtensions(); + vk::Format pickDepthFormat( vk::PhysicalDevice const & physicalDevice ); + vk::PresentModeKHR pickPresentMode( std::vector const & presentModes ); + vk::SurfaceFormatKHR pickSurfaceFormat( std::vector const & formats ); + void submitAndWait( vk::UniqueDevice & device, vk::Queue queue, vk::UniqueCommandBuffer & commandBuffer ); + void updateDescriptorSets( + vk::UniqueDevice const & device, + vk::UniqueDescriptorSet const & descriptorSet, + 
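// --------------------------------------------------------------------------------------------
// Aside (not part of the patch): findMemoryType, declared above, follows the standard Vulkan
// pattern of walking the memory types allowed by typeBits and returning the first one that has
// all requested property flags.  A sketch of that pattern (not necessarily the sample's exact
// body):
#include <cassert>
#include <vulkan/vulkan.hpp>

uint32_t pickMemoryType( vk::PhysicalDeviceMemoryProperties const & memoryProperties,
                         uint32_t                                   typeBits,
                         vk::MemoryPropertyFlags                    requirementsMask )
{
  for ( uint32_t i = 0; i < memoryProperties.memoryTypeCount; i++ )
  {
    if ( ( typeBits & ( 1u << i ) ) &&
         ( ( memoryProperties.memoryTypes[i].propertyFlags & requirementsMask ) == requirementsMask ) )
    {
      return i;
    }
  }
  assert( false );  // no suitable memory type found
  return ~0u;
}
// --------------------------------------------------------------------------------------------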
std::vector> const & + bufferData, + vk::su::TextureData const & textureData, + uint32_t bindingOffset = 0 ); + void updateDescriptorSets( + vk::UniqueDevice const & device, + vk::UniqueDescriptorSet const & descriptorSet, + std::vector> const & + bufferData, + std::vector const & textureData, + uint32_t bindingOffset = 0 ); - } -} + } // namespace su +} // namespace vk -std::ostream& operator<<(std::ostream& os, vk::su::UUID const& uuid); \ No newline at end of file +std::ostream & operator<<( std::ostream & os, vk::su::UUID const & uuid ); diff --git a/tests/DeviceFunctions/DeviceFunctions.cpp b/tests/DeviceFunctions/DeviceFunctions.cpp index 111efed..e65a90a 100644 --- a/tests/DeviceFunctions/DeviceFunctions.cpp +++ b/tests/DeviceFunctions/DeviceFunctions.cpp @@ -16,54 +16,61 @@ // Compile test on device functions #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "DeviceFunctions"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "DeviceFunctions"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::ApplicationInfo appInfo(AppName, 1, EngineName, 1, VK_API_VERSION_1_1); - vk::UniqueInstance instance = vk::createInstanceUnique(vk::InstanceCreateInfo({}, &appInfo)); - vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); + vk::ApplicationInfo appInfo( AppName, 1, EngineName, 1, VK_API_VERSION_1_1 ); + vk::UniqueInstance instance = vk::createInstanceUnique( vk::InstanceCreateInfo( {}, &appInfo ) ); + vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); // get the QueueFamilyProperties of the first PhysicalDevice std::vector queueFamilyProperties = physicalDevice.getQueueFamilyProperties(); // get the first index into queueFamiliyProperties which supports graphics - size_t graphicsQueueFamilyIndex = std::distance(queueFamilyProperties.begin(), - std::find_if(queueFamilyProperties.begin(), - queueFamilyProperties.end(), - [](vk::QueueFamilyProperties const& qfp) { return qfp.queueFlags & vk::QueueFlagBits::eGraphics; })); - assert(graphicsQueueFamilyIndex < queueFamilyProperties.size()); + size_t graphicsQueueFamilyIndex = std::distance( + queueFamilyProperties.begin(), + std::find_if( + queueFamilyProperties.begin(), queueFamilyProperties.end(), []( vk::QueueFamilyProperties const & qfp ) { + return qfp.queueFlags & vk::QueueFlagBits::eGraphics; + } ) ); + assert( graphicsQueueFamilyIndex < queueFamilyProperties.size() ); // create a UniqueDevice - float queuePriority = 0.0f; - vk::DeviceQueueCreateInfo deviceQueueCreateInfo(vk::DeviceQueueCreateFlags(), static_cast(graphicsQueueFamilyIndex), 1, &queuePriority); - vk::UniqueDevice device = physicalDevice.createDeviceUnique(vk::DeviceCreateInfo(vk::DeviceCreateFlags(), 1, &deviceQueueCreateInfo)); + float queuePriority = 0.0f; + vk::DeviceQueueCreateInfo deviceQueueCreateInfo( + vk::DeviceQueueCreateFlags(), static_cast( graphicsQueueFamilyIndex ), 1, &queuePriority ); + vk::UniqueDevice device = + physicalDevice.createDeviceUnique( vk::DeviceCreateInfo( vk::DeviceCreateFlags(), 1, &deviceQueueCreateInfo ) ); std::vector data; - device->getAccelerationStructureHandleNV({}, data, vk::DispatchLoaderDynamic()); + device->getAccelerationStructureHandleNV( {}, data, vk::DispatchLoaderDynamic() ); std::vector::allocator_type vectorAllocator; - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique({}, vectorAllocator, 
vk::DispatchLoaderStatic()).front()); + vk::UniqueCommandBuffer commandBuffer = + std::move( device->allocateCommandBuffersUnique( {}, vectorAllocator, vk::DispatchLoaderStatic() ).front() ); - commandBuffer->begin(nullptr); + commandBuffer->begin( nullptr ); std::vector>::allocator_type dynamicVectorAllocator; - vk::UniqueHandle dynamicCommandBuffer = std::move(device->allocateCommandBuffersUnique({}, dynamicVectorAllocator, vk::DispatchLoaderDynamic()).front()); + vk::UniqueHandle dynamicCommandBuffer = std::move( + device->allocateCommandBuffersUnique( {}, dynamicVectorAllocator, vk::DispatchLoaderDynamic() ).front() ); } - catch (vk::SystemError const& err) + catch ( vk::SystemError const & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; diff --git a/tests/DispatchLoaderDynamic/DispatchLoaderDynamic.cpp b/tests/DispatchLoaderDynamic/DispatchLoaderDynamic.cpp index 8b8458d..f8bf879 100644 --- a/tests/DispatchLoaderDynamic/DispatchLoaderDynamic.cpp +++ b/tests/DispatchLoaderDynamic/DispatchLoaderDynamic.cpp @@ -17,45 +17,46 @@ #define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1 -#include #include "vulkan/vulkan.hpp" #include #include +#include VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::DynamicLoader dl; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = dl.getProcAddress("vkGetInstanceProcAddr"); - VULKAN_HPP_DEFAULT_DISPATCHER.init(vkGetInstanceProcAddr); + vk::DynamicLoader dl; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = + dl.getProcAddress( "vkGetInstanceProcAddr" ); + VULKAN_HPP_DEFAULT_DISPATCHER.init( vkGetInstanceProcAddr ); - vk::Instance instance = vk::createInstance({}, nullptr); + vk::Instance instance = vk::createInstance( {}, nullptr ); // initialize function pointers for instance - VULKAN_HPP_DEFAULT_DISPATCHER.init(instance); + VULKAN_HPP_DEFAULT_DISPATCHER.init( instance ); // create a dispatcher, based on additional vkDevice/vkGetDeviceProcAddr std::vector physicalDevices = instance.enumeratePhysicalDevices(); - assert(!physicalDevices.empty()); + assert( !physicalDevices.empty() ); - vk::Device device = physicalDevices[0].createDevice({}, nullptr); + vk::Device device = physicalDevices[0].createDevice( {}, nullptr ); // function pointer specialization for device - VULKAN_HPP_DEFAULT_DISPATCHER.init(device); + VULKAN_HPP_DEFAULT_DISPATCHER.init( device ); } - catch (vk::SystemError const& err) + catch ( vk::SystemError const & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... 
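// --------------------------------------------------------------------------------------------
// Aside (not part of the patch): the DispatchLoaderDynamic test above initializes the default
// dispatcher in three steps -- global, instance-level, then device-level function pointers.
// Condensed here with the purpose of each step spelled out (mirrors the test, so the same
// runtime requirements apply):
#define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
#include "vulkan/vulkan.hpp"

VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE

int main()
{
  vk::DynamicLoader dl;  // loads the Vulkan library at runtime

  // 1) global functions (vkCreateInstance, vkEnumerateInstanceExtensionProperties, ...)
  auto vkGetInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( vkGetInstanceProcAddr );

  // 2) instance-level functions
  vk::Instance instance = vk::createInstance( {}, nullptr );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( instance );

  // 3) device-level functions, resolved directly to skip per-call dispatch through the loader
  vk::Device device = instance.enumeratePhysicalDevices().front().createDevice( {}, nullptr );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( device );
  return 0;
}
// --------------------------------------------------------------------------------------------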
) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; diff --git a/tests/Hash/Hash.cpp b/tests/Hash/Hash.cpp index 41e1fcf..57bc25f 100644 --- a/tests/Hash/Hash.cpp +++ b/tests/Hash/Hash.cpp @@ -16,39 +16,40 @@ // Compile test on using std::hash on handles #include "vulkan/vulkan.hpp" + #include #include #include -static char const* AppName = "Hash"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "Hash"; +static char const * EngineName = "Vulkan.hpp"; -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::ApplicationInfo appInfo(AppName, 1, EngineName, 1, VK_API_VERSION_1_1); - vk::UniqueInstance instance = vk::createInstanceUnique(vk::InstanceCreateInfo({}, &appInfo)); + vk::ApplicationInfo appInfo( AppName, 1, EngineName, 1, VK_API_VERSION_1_1 ); + vk::UniqueInstance instance = vk::createInstanceUnique( vk::InstanceCreateInfo( {}, &appInfo ) ); - auto h1 = std::hash{}(*instance); - auto h2 = std::hash{}(*instance); - assert(h1 == h2); + auto h1 = std::hash{}( *instance ); + auto h2 = std::hash{}( *instance ); + assert( h1 == h2 ); std::unordered_set uset; - uset.insert(*instance); + uset.insert( *instance ); std::unordered_map umap; umap[*instance] = 1; } - catch (vk::SystemError const& err) + catch ( vk::SystemError const & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; diff --git a/tests/NoExceptions/NoExceptions.cpp b/tests/NoExceptions/NoExceptions.cpp index 34349fa..b555d33 100644 --- a/tests/NoExceptions/NoExceptions.cpp +++ b/tests/NoExceptions/NoExceptions.cpp @@ -20,47 +20,61 @@ #define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1 #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "NoExceptions"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "NoExceptions"; +static char const * EngineName = "Vulkan.hpp"; VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { - vk::DynamicLoader dl; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = dl.getProcAddress("vkGetInstanceProcAddr"); - VULKAN_HPP_DEFAULT_DISPATCHER.init(vkGetInstanceProcAddr); + vk::DynamicLoader dl; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = + dl.getProcAddress( "vkGetInstanceProcAddr" ); + VULKAN_HPP_DEFAULT_DISPATCHER.init( vkGetInstanceProcAddr ); - vk::ApplicationInfo appInfo(AppName, 1, EngineName, 1, VK_API_VERSION_1_1); - vk::UniqueInstance instance = vk::createInstanceUnique(vk::InstanceCreateInfo({}, &appInfo)).value; - VULKAN_HPP_DEFAULT_DISPATCHER.init(*instance); + vk::ApplicationInfo appInfo( AppName, 1, EngineName, 1, VK_API_VERSION_1_1 ); + vk::UniqueInstance instance = vk::createInstanceUnique( vk::InstanceCreateInfo( {}, &appInfo ) ).value; + VULKAN_HPP_DEFAULT_DISPATCHER.init( *instance ); std::vector physicalDevices = instance->enumeratePhysicalDevices().value; - assert(!physicalDevices.empty()); + assert( !physicalDevices.empty() ); // get the QueueFamilyProperties of the first PhysicalDevice std::vector queueFamilyProperties = physicalDevices[0].getQueueFamilyProperties(); // get the first index into queueFamiliyProperties which supports graphics - size_t graphicsQueueFamilyIndex = std::distance(queueFamilyProperties.begin(), - std::find_if(queueFamilyProperties.begin(), - queueFamilyProperties.end(), - 
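// --------------------------------------------------------------------------------------------
// Aside (not part of the patch): with exceptions disabled, the vulkan.hpp calls used in this
// NoExceptions test return a result/value pair instead of throwing, which is why the test reads
// ".value" everywhere.  A sketch of the more defensive form that checks the result first
// (createInstanceChecked is a hypothetical name; assumes VULKAN_HPP_NO_EXCEPTIONS is in effect,
// as this test presumably arranges given its use of .value):
#include <utility>
#include "vulkan/vulkan.hpp"

bool createInstanceChecked( vk::UniqueInstance & out )
{
  auto rv = vk::createInstanceUnique( vk::InstanceCreateInfo( {}, nullptr ) );
  if ( rv.result != vk::Result::eSuccess )
  {
    return false;  // inspect rv.result for the concrete error code
  }
  out = std::move( rv.value );
  return true;
}
// --------------------------------------------------------------------------------------------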
[](vk::QueueFamilyProperties const& qfp) { return qfp.queueFlags & vk::QueueFlagBits::eGraphics; })); - assert(graphicsQueueFamilyIndex < queueFamilyProperties.size()); + size_t graphicsQueueFamilyIndex = std::distance( queueFamilyProperties.begin(), + std::find_if( queueFamilyProperties.begin(), + queueFamilyProperties.end(), + []( vk::QueueFamilyProperties const & qfp ) { + return qfp.queueFlags & vk::QueueFlagBits::eGraphics; + } ) ); + assert( graphicsQueueFamilyIndex < queueFamilyProperties.size() ); // create a UniqueDevice - float queuePriority = 0.0f; - vk::DeviceQueueCreateInfo deviceQueueCreateInfo(vk::DeviceQueueCreateFlags(), static_cast(graphicsQueueFamilyIndex), 1, &queuePriority); - vk::UniqueDevice device = physicalDevices[0].createDeviceUnique(vk::DeviceCreateInfo(vk::DeviceCreateFlags(), 1, &deviceQueueCreateInfo)).value; - VULKAN_HPP_DEFAULT_DISPATCHER.init(*device); + float queuePriority = 0.0f; + vk::DeviceQueueCreateInfo deviceQueueCreateInfo( + vk::DeviceQueueCreateFlags(), static_cast( graphicsQueueFamilyIndex ), 1, &queuePriority ); + vk::UniqueDevice device = + physicalDevices[0] + .createDeviceUnique( vk::DeviceCreateInfo( vk::DeviceCreateFlags(), 1, &deviceQueueCreateInfo ) ) + .value; + VULKAN_HPP_DEFAULT_DISPATCHER.init( *device ); // create a UniqueCommandPool to allocate a CommandBuffer from - vk::UniqueCommandPool commandPool = device->createCommandPoolUnique(vk::CommandPoolCreateInfo(vk::CommandPoolCreateFlags(), deviceQueueCreateInfo.queueFamilyIndex)).value; + vk::UniqueCommandPool commandPool = device + ->createCommandPoolUnique( vk::CommandPoolCreateInfo( + vk::CommandPoolCreateFlags(), deviceQueueCreateInfo.queueFamilyIndex ) ) + .value; // allocate a CommandBuffer from the CommandPool - vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(commandPool.get(), vk::CommandBufferLevel::ePrimary, 1)).value.front()); + vk::UniqueCommandBuffer commandBuffer = std::move( device + ->allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( + commandPool.get(), vk::CommandBufferLevel::ePrimary, 1 ) ) + .value.front() ); return 0; } diff --git a/tests/StructureChain/StructureChain.cpp b/tests/StructureChain/StructureChain.cpp index 644a40c..e418cb0 100644 --- a/tests/StructureChain/StructureChain.cpp +++ b/tests/StructureChain/StructureChain.cpp @@ -18,15 +18,16 @@ #define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1 #include "vulkan/vulkan.hpp" + #include -static char const* AppName = "StructureChain"; -static char const* EngineName = "Vulkan.hpp"; +static char const * AppName = "StructureChain"; +static char const * EngineName = "Vulkan.hpp"; -#if defined(_MSC_VER) -#pragma warning( disable : 4189 ) -#elif defined(__GNUC__) -#pragma GCC diagnostic ignored "-Wunused-variable" +#if defined( _MSC_VER ) +# pragma warning( disable : 4189 ) +#elif defined( __GNUC__ ) +# pragma GCC diagnostic ignored "-Wunused-variable" #else // unknow compiler... 
just ignore the warnings for yourselves ;) #endif @@ -34,93 +35,106 @@ static char const* EngineName = "Vulkan.hpp"; VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE template -void unused(T const&) +void unused( T const & ) {} -int main(int /*argc*/, char ** /*argv*/) +int main( int /*argc*/, char ** /*argv*/ ) { try { - vk::DynamicLoader dl; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = dl.getProcAddress("vkGetInstanceProcAddr"); - VULKAN_HPP_DEFAULT_DISPATCHER.init(vkGetInstanceProcAddr); + vk::DynamicLoader dl; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = + dl.getProcAddress( "vkGetInstanceProcAddr" ); + VULKAN_HPP_DEFAULT_DISPATCHER.init( vkGetInstanceProcAddr ); - vk::ApplicationInfo appInfo(AppName, 1, EngineName, 1, VK_API_VERSION_1_1); - vk::UniqueInstance instance = vk::createInstanceUnique(vk::InstanceCreateInfo({}, &appInfo)); - VULKAN_HPP_DEFAULT_DISPATCHER.init(*instance); + vk::ApplicationInfo appInfo( AppName, 1, EngineName, 1, VK_API_VERSION_1_1 ); + vk::UniqueInstance instance = vk::createInstanceUnique( vk::InstanceCreateInfo( {}, &appInfo ) ); + VULKAN_HPP_DEFAULT_DISPATCHER.init( *instance ); vk::PhysicalDevice physicalDevice = instance->enumeratePhysicalDevices().front(); // some valid StructureChains - vk::StructureChain sc0; - vk::StructureChain sc1; - vk::StructureChain sc2; + vk::StructureChain sc0; + vk::StructureChain sc1; + vk::StructureChain sc2; vk::StructureChain sc3; - vk::StructureChain sc4; - vk::StructureChain sc6; - vk::StructureChain sc7; + vk::StructureChain + sc4; + vk::StructureChain + sc6; + vk::StructureChain + sc7; // some invalid StructureChains - //vk::StructureChain x; - //vk::StructureChain x; - //vk::StructureChain x; - //vk::StructureChain x; - //vk::StructureChain x; + // vk::StructureChain x; + // vk::StructureChain x; vk::StructureChain x; + // vk::StructureChain + // x; vk::StructureChain x; // unlink a struct from a StructureChain sc7.unlink(); // some invalid unlink calls - //sc7.unlink(); // assertion fires on trying to unlink some already unlinked structure - //sc7.unlink(); - //sc1.unlink(); + // sc7.unlink(); // assertion fires on trying to unlink some already + // unlinked structure sc7.unlink(); + // sc1.unlink(); // re-link a struct sc7.relink(); // invalid re-linking - //sc7.relink(); - //sc1.relink(); - //sc1.relink(); // assertion fires on trying to relink some structure that hasn't been unlinked + // sc7.relink(); + // sc1.relink(); + // sc1.relink(); // assertion fires on trying to relink some structure + // that hasn't been unlinked // simple call, passing structures in vk::PhysicalDeviceFeatures2 pdf; - physicalDevice.getFeatures2(&pdf); + physicalDevice.getFeatures2( &pdf ); // simple calls, getting structure back vk::PhysicalDeviceFeatures2 a = physicalDevice.getFeatures2(); - unused(a); + unused( a ); // complex calls, getting StructureChain back auto c = physicalDevice.getFeatures2(); vk::PhysicalDeviceFeatures2 & c0 = c.get(); - unused(c0); + unused( c0 ); auto t0 = c.get(); - unused(t0); - + unused( t0 ); + auto d = physicalDevice.getFeatures2(); vk::PhysicalDeviceFeatures2 & d0 = d.get(); - unused(d0); + unused( d0 ); vk::PhysicalDeviceVariablePointerFeatures & d1 = d.get(); - unused(d1); + unused( d1 ); auto t1 = d.get(); - unused(t1); + unused( t1 ); using StructureChain = vk::StructureChain; - using AllocatorType = std::vector::allocator_type; - auto qfd = physicalDevice.getQueueFamilyProperties2(VULKAN_HPP_DEFAULT_DISPATCHER); - unused(qfd); + using AllocatorType = std::vector::allocator_type; + 
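// --------------------------------------------------------------------------------------------
// Aside (not part of the patch): the feature-query pattern exercised above, condensed -- request
// a chain of feature structs in one call and pull the individual links back out by type.
// Assumes a valid physicalDevice and an initialized dispatcher, as set up in this test:
#include "vulkan/vulkan.hpp"

void queryFeatures( vk::PhysicalDevice physicalDevice )
{
  auto chain =
    physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVariablePointerFeatures>();

  vk::PhysicalDeviceFeatures2 const &               core    = chain.get<vk::PhysicalDeviceFeatures2>();
  vk::PhysicalDeviceVariablePointerFeatures const & varPtrs =
    chain.get<vk::PhysicalDeviceVariablePointerFeatures>();

  // core.features.* and varPtrs.variablePointers are now filled in; the pNext links between the
  // structs were wired up by the StructureChain itself
  (void)core;
  (void)varPtrs;
}
// --------------------------------------------------------------------------------------------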
auto qfd = physicalDevice.getQueueFamilyProperties2( VULKAN_HPP_DEFAULT_DISPATCHER ); + unused( qfd ); } - catch (vk::SystemError const& err) + catch ( vk::SystemError const & err ) { std::cout << "vk::SystemError: " << err.what() << std::endl; - exit(-1); + exit( -1 ); } - catch (...) + catch ( ... ) { std::cout << "unknown error\n"; - exit(-1); + exit( -1 ); } return 0; } diff --git a/tests/UniqueHandleDefaultArguments/UniqueHandleDefaultArguments.cpp b/tests/UniqueHandleDefaultArguments/UniqueHandleDefaultArguments.cpp index 4e54405..2a7aaf3 100644 --- a/tests/UniqueHandleDefaultArguments/UniqueHandleDefaultArguments.cpp +++ b/tests/UniqueHandleDefaultArguments/UniqueHandleDefaultArguments.cpp @@ -21,11 +21,11 @@ VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE -#if defined(__x86_64__) || defined(_M_X64) +#if defined( __x86_64__ ) || defined( _M_X64 ) // On 32-bit systems the test is ambiguous. void test() { - VkSurfaceKHR surface = 0; - auto uniqueSurface = vk::UniqueSurfaceKHR(surface, vk::Instance()); + VkSurfaceKHR surface = 0; + auto uniqueSurface = vk::UniqueSurfaceKHR( surface, vk::Instance() ); } #endif diff --git a/vulkan/vulkan.hpp b/vulkan/vulkan.hpp index 1b80610..6c70f99 100644 --- a/vulkan/vulkan.hpp +++ b/vulkan/vulkan.hpp @@ -1,24 +1,24 @@ // Copyright (c) 2015-2020 The Khronos Group Inc. -// +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at -// +// // http://www.apache.org/licenses/LICENSE-2.0 -// +// // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// +// // ---- Exceptions to the Apache 2.0 License: ---- -// +// // As an exception, if you use this Software to generate code and portions of // this Software are embedded into the generated code as a result, you may // redistribute such product without providing attribution as would otherwise // be required by Sections 4(a), 4(b) and 4(d) of the License. -// +// // In addition, if you combine or link code generated by this Software with // software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 // ("`Combined Software`") and if a court of competent jurisdiction determines @@ -27,7 +27,7 @@ // applicable GPL or LGPL license, you may retroactively and prospectively // choose to deem waived or otherwise exclude such Section(s) of the License, // but only in their entirety and only with respect to the Combined Software. -// +// // This header is generated from the Khronos Vulkan XML API Registry. 
@@ -47,208 +47,209 @@ #include #include -#if defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) -# if !defined(VULKAN_HPP_NO_SMART_HANDLE) -# define VULKAN_HPP_NO_SMART_HANDLE -# endif +#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) +# if !defined( VULKAN_HPP_NO_SMART_HANDLE ) +# define VULKAN_HPP_NO_SMART_HANDLE +# endif #else -# include -# include +# include +# include #endif -#if !defined(VULKAN_HPP_ASSERT) -# include -# define VULKAN_HPP_ASSERT assert +#if !defined( VULKAN_HPP_ASSERT ) +# include +# define VULKAN_HPP_ASSERT assert #endif -#if !defined(VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL) -# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1 +#if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL ) +# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1 #endif #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1 -# if defined(__linux__) || defined(__APPLE__) -# include +# if defined( __linux__ ) || defined( __APPLE__ ) +# include # endif -# if defined(_WIN32) -# include +# if defined( _WIN32 ) +# include # endif #endif #if 201711 <= __cpp_impl_three_way_comparison -# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR +# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR #endif -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) -# include +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +# include #endif -static_assert( VK_HEADER_VERSION == 137 , "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 137, "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default. // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION -#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) -# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) -# define VULKAN_HPP_TYPESAFE_CONVERSION -# endif +#if defined( __LP64__ ) || defined( _WIN64 ) || ( defined( __x86_64__ ) && !defined( __ILP32__ ) ) || \ + defined( _M_X64 ) || defined( __ia64 ) || defined( _M_IA64 ) || defined( __aarch64__ ) || defined( __powerpc64__ ) +# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_CONVERSION +# endif #endif // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) // which is an expression in a constructor initializer list. -#if defined(major) - #undef major +#if defined( major ) +# undef major #endif -#if defined(minor) - #undef minor +#if defined( minor ) +# undef minor #endif // Windows defines MemoryBarrier which is deprecated and collides // with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct. 
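// --------------------------------------------------------------------------------------------
// Aside (not part of the patch): the "#if !defined(...)" blocks above are customization points;
// whatever a project defines before including vulkan.hpp wins over the defaults.  For example,
// routing the header's internal assertions into a project-specific handler (myAssert is a
// placeholder name):
#include <cstdio>
#include <cstdlib>

inline void myAssert( bool condition )
{
  if ( !condition )
  {
    std::fputs( "vulkan.hpp assertion failed\n", stderr );
    std::abort();
  }
}

#define VULKAN_HPP_ASSERT myAssert  // must come before the include, mirroring the guard above
#include <vulkan/vulkan.hpp>
// --------------------------------------------------------------------------------------------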
-#if defined(MemoryBarrier) - #undef MemoryBarrier +#if defined( MemoryBarrier ) +# undef MemoryBarrier #endif -#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS) -# if defined(__clang__) -# if __has_feature(cxx_unrestricted_unions) -# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS ) +# if defined( __clang__ ) +# if __has_feature( cxx_unrestricted_unions ) +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( __GNUC__ ) +# define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ ) +# if 40600 <= GCC_VERSION +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( _MSC_VER ) +# if 1900 <= _MSC_VER +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif # endif -# elif defined(__GNUC__) -# define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) -# if 40600 <= GCC_VERSION -# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS -# endif -# elif defined(_MSC_VER) -# if 1900 <= _MSC_VER -# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS -# endif -# endif #endif -#if !defined(VULKAN_HPP_INLINE) -# if defined(__clang__) -# if __has_attribute(always_inline) -# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__ +#if !defined( VULKAN_HPP_INLINE ) +# if defined( __clang__ ) +# if __has_attribute( always_inline ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# else +# define VULKAN_HPP_INLINE inline +# endif +# elif defined( __GNUC__ ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# elif defined( _MSC_VER ) +# define VULKAN_HPP_INLINE inline # else -# define VULKAN_HPP_INLINE inline +# define VULKAN_HPP_INLINE inline # endif -# elif defined(__GNUC__) -# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__ -# elif defined(_MSC_VER) -# define VULKAN_HPP_INLINE inline -# else -# define VULKAN_HPP_INLINE inline -# endif #endif -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) -# define VULKAN_HPP_TYPESAFE_EXPLICIT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_EXPLICIT #else -# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit +# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit #endif -#if defined(__cpp_constexpr) -# define VULKAN_HPP_CONSTEXPR constexpr -# if __cpp_constexpr >= 201304 -# define VULKAN_HPP_CONSTEXPR_14 constexpr -# else +#if defined( __cpp_constexpr ) +# define VULKAN_HPP_CONSTEXPR constexpr +# if __cpp_constexpr >= 201304 +# define VULKAN_HPP_CONSTEXPR_14 constexpr +# else +# define VULKAN_HPP_CONSTEXPR_14 +# endif +# define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr +#else +# define VULKAN_HPP_CONSTEXPR # define VULKAN_HPP_CONSTEXPR_14 -# endif -# define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr -#else -# define VULKAN_HPP_CONSTEXPR -# define VULKAN_HPP_CONSTEXPR_14 -# define VULKAN_HPP_CONST_OR_CONSTEXPR const +# define VULKAN_HPP_CONST_OR_CONSTEXPR const #endif -#if !defined(VULKAN_HPP_NOEXCEPT) -# if defined(_MSC_VER) && (_MSC_VER <= 1800) -# define VULKAN_HPP_NOEXCEPT -# else -# define VULKAN_HPP_NOEXCEPT noexcept -# define VULKAN_HPP_HAS_NOEXCEPT 1 -# endif +#if !defined( VULKAN_HPP_NOEXCEPT ) +# if defined( _MSC_VER ) && ( _MSC_VER <= 1800 ) +# define VULKAN_HPP_NOEXCEPT +# else +# define VULKAN_HPP_NOEXCEPT noexcept +# define VULKAN_HPP_HAS_NOEXCEPT 1 +# endif #endif -#if !defined(VULKAN_HPP_NAMESPACE) -#define VULKAN_HPP_NAMESPACE vk +#if !defined( VULKAN_HPP_NAMESPACE ) +# define VULKAN_HPP_NAMESPACE vk #endif -#define VULKAN_HPP_STRINGIFY2(text) #text -#define 
VULKAN_HPP_STRINGIFY(text) VULKAN_HPP_STRINGIFY2(text) -#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY(VULKAN_HPP_NAMESPACE) +#define VULKAN_HPP_STRINGIFY2( text ) #text +#define VULKAN_HPP_STRINGIFY( text ) VULKAN_HPP_STRINGIFY2( text ) +#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE ) namespace VULKAN_HPP_NAMESPACE { -#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template class ArrayProxy { public: VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT - : m_count(0) - , m_ptr(nullptr) + : m_count( 0 ) + , m_ptr( nullptr ) {} - VULKAN_HPP_CONSTEXPR ArrayProxy(std::nullptr_t) VULKAN_HPP_NOEXCEPT - : m_count(0) - , m_ptr(nullptr) + VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) {} - ArrayProxy(T & value) VULKAN_HPP_NOEXCEPT - : m_count(1) - , m_ptr(&value) + ArrayProxy( T & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) {} - template::value, int>::type = 0> - ArrayProxy(typename std::remove_const::type & value) VULKAN_HPP_NOEXCEPT - : m_count(1) - , m_ptr(&value) + template ::value, int>::type = 0> + ArrayProxy( typename std::remove_const::type & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) {} - ArrayProxy(uint32_t count, T * ptr) VULKAN_HPP_NOEXCEPT - : m_count(count) - , m_ptr(ptr) + ArrayProxy( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) {} - template::value, int>::type = 0> - ArrayProxy(uint32_t count, typename std::remove_const::type * ptr) VULKAN_HPP_NOEXCEPT - : m_count(count) - , m_ptr(ptr) + template ::value, int>::type = 0> + ArrayProxy( uint32_t count, typename std::remove_const::type * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) {} - ArrayProxy(std::initializer_list const& list) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(list.size())) - , m_ptr(list.begin()) + ArrayProxy( std::initializer_list const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} - template::value, int>::type = 0> - ArrayProxy(std::initializer_list::type> const& list) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(list.size())) - , m_ptr(list.begin()) + template ::value, int>::type = 0> + ArrayProxy( std::initializer_list::type> const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} - ArrayProxy(std::initializer_list & list) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(list.size())) - , m_ptr(list.begin()) + ArrayProxy( std::initializer_list & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} - template::value, int>::type = 0> - ArrayProxy(std::initializer_list::type> & list) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(list.size())) - , m_ptr(list.begin()) + template ::value, int>::type = 0> + ArrayProxy( std::initializer_list::type> & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} template - ArrayProxy(Container const& container) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(container.size())) - , m_ptr(container.data()) + ArrayProxy( Container const & container ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( container.size() ) ) + , m_ptr( container.data() ) {} template - ArrayProxy(Container & container) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(container.size())) - , m_ptr(container.data()) + ArrayProxy( Container & container ) VULKAN_HPP_NOEXCEPT + : m_count( 
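// --------------------------------------------------------------------------------------------
// Aside (not part of the patch): the many ArrayProxy constructors above all exist so that one
// parameter type can accept a single element, an initializer list, a std::vector, a std::array,
// or an explicit count/pointer pair, without copying the elements.  A small sketch outside of
// any Vulkan call (sum and arrayProxyDemo are hypothetical names):
#include <array>
#include <initializer_list>
#include <vector>
#include <vulkan/vulkan.hpp>

uint32_t sum( vk::ArrayProxy<const uint32_t> values )
{
  uint32_t total = 0;
  for ( auto v : values )  // ArrayProxy exposes begin()/end() over the referenced storage
  {
    total += v;
  }
  return total;
}

void arrayProxyDemo()
{
  uint32_t                        one = 1;
  std::initializer_list<uint32_t> il  = { 7, 8 };
  std::vector<uint32_t>           vec = { 1, 2, 3 };
  std::array<uint32_t, 2>         arr = { { 4, 5 } };

  sum( one );                                              // single value
  sum( il );                                               // initializer list
  sum( vec );                                              // any container with data() and size()
  sum( arr );
  sum( vk::ArrayProxy<const uint32_t>( 3, vec.data() ) );  // explicit count + pointer
}
// --------------------------------------------------------------------------------------------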
static_cast( container.size() ) ) + , m_ptr( container.data() ) {} const T * begin() const VULKAN_HPP_NOEXCEPT @@ -263,19 +264,19 @@ namespace VULKAN_HPP_NAMESPACE const T & front() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT(m_count && m_ptr); + VULKAN_HPP_ASSERT( m_count && m_ptr ); return *m_ptr; } const T & back() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT(m_count && m_ptr); - return *(m_ptr + m_count - 1); + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *( m_ptr + m_count - 1 ); } bool empty() const VULKAN_HPP_NOEXCEPT { - return (m_count == 0); + return ( m_count == 0 ); } uint32_t size() const VULKAN_HPP_NOEXCEPT @@ -289,41 +290,38 @@ namespace VULKAN_HPP_NAMESPACE } private: - uint32_t m_count; - T * m_ptr; + uint32_t m_count; + T * m_ptr; }; #endif template - class ArrayWrapper1D : public std::array + class ArrayWrapper1D : public std::array { public: - VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT - : std::array() + VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT : std::array() {} + + VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array const & data ) VULKAN_HPP_NOEXCEPT : std::array( data ) {} - VULKAN_HPP_CONSTEXPR ArrayWrapper1D(std::array const& data) VULKAN_HPP_NOEXCEPT - : std::array(data) - {} - -#if defined(_WIN32) && !defined(_WIN64) - VULKAN_HPP_CONSTEXPR T const& operator[](int index) const VULKAN_HPP_NOEXCEPT +#if defined( _WIN32 ) && !defined( _WIN64 ) + VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT { - return std::array::operator[](index); + return std::array::operator[]( index ); } - VULKAN_HPP_CONSTEXPR T & operator[](int index) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR T & operator[]( int index ) VULKAN_HPP_NOEXCEPT { - return std::array::operator[](index); + return std::array::operator[]( index ); } #endif - operator T const* () const VULKAN_HPP_NOEXCEPT + operator T const *() const VULKAN_HPP_NOEXCEPT { return this->data(); } - operator T * () VULKAN_HPP_NOEXCEPT + operator T *() VULKAN_HPP_NOEXCEPT { return this->data(); } @@ -331,57 +329,59 @@ namespace VULKAN_HPP_NAMESPACE // specialization of relational operators between std::string and arrays of chars template - bool operator<(std::string const& lhs, ArrayWrapper1D const& rhs) VULKAN_HPP_NOEXCEPT + bool operator<( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { return lhs < rhs.data(); } template - bool operator<=(std::string const& lhs, ArrayWrapper1D const& rhs) VULKAN_HPP_NOEXCEPT + bool operator<=( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { return lhs <= rhs.data(); } template - bool operator>(std::string const& lhs, ArrayWrapper1D const& rhs) VULKAN_HPP_NOEXCEPT + bool operator>( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { return lhs > rhs.data(); } template - bool operator>=(std::string const& lhs, ArrayWrapper1D const& rhs) VULKAN_HPP_NOEXCEPT + bool operator>=( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { return lhs >= rhs.data(); } template - bool operator==(std::string const& lhs, ArrayWrapper1D const& rhs) VULKAN_HPP_NOEXCEPT + bool operator==( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { return lhs == rhs.data(); } template - bool operator!=(std::string const& lhs, ArrayWrapper1D const& rhs) VULKAN_HPP_NOEXCEPT + bool operator!=( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { return lhs != rhs.data(); } template - class ArrayWrapper2D : public 
std::array,N> + class ArrayWrapper2D : public std::array, N> { public: - VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT - : std::array, N>() - {} + VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT : std::array, N>() {} - VULKAN_HPP_CONSTEXPR ArrayWrapper2D(std::array,N> const& data) VULKAN_HPP_NOEXCEPT - : std::array, N>(*reinterpret_cast,N> const*>(&data)) + VULKAN_HPP_CONSTEXPR ArrayWrapper2D( std::array, N> const & data ) VULKAN_HPP_NOEXCEPT + : std::array, N>( *reinterpret_cast, N> const *>( &data ) ) {} }; - template struct FlagTraits + template + struct FlagTraits { - enum { allFlags = 0 }; + enum + { + allFlags = 0 + }; }; template @@ -391,52 +391,44 @@ namespace VULKAN_HPP_NAMESPACE using MaskType = typename std::underlying_type::type; // constructors - VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT - : m_mask(0) - {} + VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT : m_mask( 0 ) {} - VULKAN_HPP_CONSTEXPR Flags(BitType bit) VULKAN_HPP_NOEXCEPT - : m_mask(static_cast(bit)) - {} + VULKAN_HPP_CONSTEXPR Flags( BitType bit ) VULKAN_HPP_NOEXCEPT : m_mask( static_cast( bit ) ) {} - VULKAN_HPP_CONSTEXPR Flags(Flags const& rhs) VULKAN_HPP_NOEXCEPT - : m_mask(rhs.m_mask) - {} + VULKAN_HPP_CONSTEXPR Flags( Flags const & rhs ) VULKAN_HPP_NOEXCEPT : m_mask( rhs.m_mask ) {} - VULKAN_HPP_CONSTEXPR explicit Flags(MaskType flags) VULKAN_HPP_NOEXCEPT - : m_mask(flags) - {} + VULKAN_HPP_CONSTEXPR explicit Flags( MaskType flags ) VULKAN_HPP_NOEXCEPT : m_mask( flags ) {} // relational operators -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>(Flags const&) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Flags const & ) const = default; #else - VULKAN_HPP_CONSTEXPR bool operator<(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator<( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_mask < rhs.m_mask; } - VULKAN_HPP_CONSTEXPR bool operator<=(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator<=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_mask <= rhs.m_mask; } - VULKAN_HPP_CONSTEXPR bool operator>(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator>( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_mask > rhs.m_mask; } - VULKAN_HPP_CONSTEXPR bool operator>=(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator>=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_mask >= rhs.m_mask; } - VULKAN_HPP_CONSTEXPR bool operator==(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator==( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_mask == rhs.m_mask; } - VULKAN_HPP_CONSTEXPR bool operator!=(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator!=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_mask != rhs.m_mask; } @@ -449,46 +441,46 @@ namespace VULKAN_HPP_NAMESPACE } // bitwise operators - VULKAN_HPP_CONSTEXPR Flags operator&(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Flags operator&( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return Flags(m_mask & rhs.m_mask); + return Flags( m_mask & rhs.m_mask ); } - VULKAN_HPP_CONSTEXPR Flags operator|(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Flags operator|( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return Flags(m_mask | rhs.m_mask); + return Flags( m_mask | rhs.m_mask ); } - VULKAN_HPP_CONSTEXPR Flags 
operator^(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Flags operator^( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return Flags(m_mask ^ rhs.m_mask); + return Flags( m_mask ^ rhs.m_mask ); } VULKAN_HPP_CONSTEXPR Flags operator~() const VULKAN_HPP_NOEXCEPT { - return Flags(m_mask ^ FlagTraits::allFlags); + return Flags( m_mask ^ FlagTraits::allFlags ); } // assignment operators - Flags & operator=(Flags const& rhs) VULKAN_HPP_NOEXCEPT + Flags & operator=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT { m_mask = rhs.m_mask; return *this; } - Flags & operator|=(Flags const& rhs) VULKAN_HPP_NOEXCEPT + Flags & operator|=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT { m_mask |= rhs.m_mask; return *this; } - Flags & operator&=(Flags const& rhs) VULKAN_HPP_NOEXCEPT + Flags & operator&=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT { m_mask &= rhs.m_mask; return *this; } - Flags & operator^=(Flags const& rhs) VULKAN_HPP_NOEXCEPT + Flags & operator^=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT { m_mask ^= rhs.m_mask; return *this; @@ -502,47 +494,47 @@ namespace VULKAN_HPP_NAMESPACE explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT { - return m_mask; + return m_mask; } private: - MaskType m_mask; + MaskType m_mask; }; -#if !defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) +#if !defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) // relational operators only needed for pre C++20 template - VULKAN_HPP_CONSTEXPR bool operator<(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator<( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT { return flags > bit; } template - VULKAN_HPP_CONSTEXPR bool operator<=(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator<=( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT { return flags >= bit; } template - VULKAN_HPP_CONSTEXPR bool operator>(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator>( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT { return flags < bit; } template - VULKAN_HPP_CONSTEXPR bool operator>=(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator>=( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT { return flags <= bit; } template - VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator==( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT { return flags == bit; } template - VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator!=( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT { return flags != bit; } @@ -550,19 +542,19 @@ namespace VULKAN_HPP_NAMESPACE // bitwise operators template - VULKAN_HPP_CONSTEXPR Flags operator&(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Flags operator&( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT { return flags & bit; } template - VULKAN_HPP_CONSTEXPR Flags operator|(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Flags operator|( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT { return flags | bit; } template - VULKAN_HPP_CONSTEXPR Flags operator^(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Flags operator^( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT { return flags ^ bit; } @@ -571,19 +563,44 @@ namespace VULKAN_HPP_NAMESPACE class Optional { public: - 
Optional(RefType & reference) VULKAN_HPP_NOEXCEPT { m_ptr = &reference; } - Optional(RefType * ptr) VULKAN_HPP_NOEXCEPT { m_ptr = ptr; } - Optional(std::nullptr_t) VULKAN_HPP_NOEXCEPT { m_ptr = nullptr; } + Optional( RefType & reference ) VULKAN_HPP_NOEXCEPT + { + m_ptr = &reference; + } + Optional( RefType * ptr ) VULKAN_HPP_NOEXCEPT + { + m_ptr = ptr; + } + Optional( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_ptr = nullptr; + } - operator RefType*() const VULKAN_HPP_NOEXCEPT { return m_ptr; } - RefType const* operator->() const VULKAN_HPP_NOEXCEPT { return m_ptr; } - explicit operator bool() const VULKAN_HPP_NOEXCEPT { return !!m_ptr; } + operator RefType *() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + RefType const * operator->() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return !!m_ptr; + } private: - RefType *m_ptr; + RefType * m_ptr; }; - template struct isStructureChainValid { enum { value = false }; }; + template + struct isStructureChainValid + { + enum + { + value = false + }; + }; template struct TypeList @@ -595,28 +612,29 @@ namespace VULKAN_HPP_NAMESPACE template struct extendCheck { - static const bool valid = isStructureChainValid::value || extendCheck::valid; + static const bool valid = + isStructureChainValid::value || extendCheck::valid; }; template - struct extendCheck,X> + struct extendCheck, X> { static const bool valid = isStructureChainValid::value; }; template - struct extendCheck + struct extendCheck { static const bool valid = true; }; - template + template struct isPartOfStructureChain { static const bool valid = false; }; - template + template struct isPartOfStructureChain { static const bool valid = std::is_same::value || isPartOfStructureChain::valid; @@ -626,13 +644,20 @@ namespace VULKAN_HPP_NAMESPACE class StructureChainElement { public: - explicit operator Element&() VULKAN_HPP_NOEXCEPT { return value; } - explicit operator const Element&() const VULKAN_HPP_NOEXCEPT { return value; } + explicit operator Element &() VULKAN_HPP_NOEXCEPT + { + return value; + } + explicit operator const Element &() const VULKAN_HPP_NOEXCEPT + { + return value; + } + private: Element value; }; - template + template class StructureChain : private StructureChainElement... { public: @@ -641,171 +666,184 @@ namespace VULKAN_HPP_NAMESPACE link(); } - StructureChain(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT + StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT { - linkAndCopy(rhs); + linkAndCopy( rhs ); } - StructureChain(StructureElements const &... elems) VULKAN_HPP_NOEXCEPT + StructureChain( StructureElements const &... elems ) VULKAN_HPP_NOEXCEPT { - linkAndCopyElements(elems...); + linkAndCopyElements( elems... 
); } - StructureChain& operator=(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT + StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT { - linkAndCopy(rhs); + linkAndCopy( rhs ); return *this; } - template ClassType& get() VULKAN_HPP_NOEXCEPT { return static_cast(*this);} - - template const ClassType& get() const VULKAN_HPP_NOEXCEPT { return static_cast(*this);} - - template - std::tuple get() + template + ClassType & get() VULKAN_HPP_NOEXCEPT { - return std::tie(get(), get(), get()...); + return static_cast( *this ); } - template - std::tuple get() const + template + const ClassType & get() const VULKAN_HPP_NOEXCEPT { - return std::tie(get(), get(), get()...); + return static_cast( *this ); } - template + template + std::tuple get() + { + return std::tie( get(), get(), get()... ); + } + + template + std::tuple get() const + { + return std::tie( get(), get(), get()... ); + } + + template void unlink() VULKAN_HPP_NOEXCEPT { - static_assert(isPartOfStructureChain::valid, "Can't unlink Structure that's not part of this StructureChain!"); - static_assert(!std::is_same>::type>::value, "It's not allowed to unlink the first element!"); - VkBaseOutStructure * ptr = reinterpret_cast(&get()); - VULKAN_HPP_ASSERT(ptr != nullptr); - VkBaseOutStructure ** ppNext = &(reinterpret_cast(this)->pNext); - VULKAN_HPP_ASSERT(*ppNext != nullptr); - while (*ppNext != ptr) + static_assert( isPartOfStructureChain::valid, + "Can't unlink Structure that's not part of this StructureChain!" ); + static_assert( + !std::is_same>::type>::value, + "It's not allowed to unlink the first element!" ); + VkBaseOutStructure * ptr = reinterpret_cast( &get() ); + VULKAN_HPP_ASSERT( ptr != nullptr ); + VkBaseOutStructure ** ppNext = &( reinterpret_cast( this )->pNext ); + VULKAN_HPP_ASSERT( *ppNext != nullptr ); + while ( *ppNext != ptr ) { - ppNext = &(*ppNext)->pNext; - VULKAN_HPP_ASSERT(*ppNext != nullptr); // fires, if the ClassType member has already been unlinked ! + ppNext = &( *ppNext )->pNext; + VULKAN_HPP_ASSERT( *ppNext != nullptr ); // fires, if the ClassType member has already been unlinked ! } - VULKAN_HPP_ASSERT(*ppNext == ptr); - *ppNext = (*ppNext)->pNext; + VULKAN_HPP_ASSERT( *ppNext == ptr ); + *ppNext = ( *ppNext )->pNext; } template void relink() VULKAN_HPP_NOEXCEPT { - static_assert(isPartOfStructureChain::valid, "Can't relink Structure that's not part of this StructureChain!"); - static_assert(!std::is_same>::type>::value, "It's not allowed to have the first element unlinked!"); - VkBaseOutStructure * ptr = reinterpret_cast(&get()); - VULKAN_HPP_ASSERT(ptr != nullptr); - VkBaseOutStructure ** ppNext = &(reinterpret_cast(this)->pNext); - VULKAN_HPP_ASSERT(*ppNext != nullptr); -#if !defined(NDEBUG) - while (*ppNext) + static_assert( isPartOfStructureChain::valid, + "Can't relink Structure that's not part of this StructureChain!" ); + static_assert( + !std::is_same>::type>::value, + "It's not allowed to have the first element unlinked!" 
); + VkBaseOutStructure * ptr = reinterpret_cast( &get() ); + VULKAN_HPP_ASSERT( ptr != nullptr ); + VkBaseOutStructure ** ppNext = &( reinterpret_cast( this )->pNext ); + VULKAN_HPP_ASSERT( *ppNext != nullptr ); +#if !defined( NDEBUG ) + while ( *ppNext ) { - VULKAN_HPP_ASSERT(*ppNext != ptr); // fires, if the ClassType member has not been unlinked before - ppNext = &(*ppNext)->pNext; + VULKAN_HPP_ASSERT( *ppNext != ptr ); // fires, if the ClassType member has not been unlinked before + ppNext = &( *ppNext )->pNext; } - ppNext = &(reinterpret_cast(this)->pNext); + ppNext = &( reinterpret_cast( this )->pNext ); #endif ptr->pNext = *ppNext; - *ppNext = ptr; + *ppNext = ptr; } private: - template + template void link() VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); + static_assert( extendCheck::valid, "The structure chain is not valid!" ); } - template + template void link() VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - X& x = static_cast(*this); - Y& y = static_cast(*this); + static_assert( extendCheck::valid, "The structure chain is not valid!" ); + X & x = static_cast( *this ); + Y & y = static_cast( *this ); x.pNext = &y; link, Y, Z...>(); } - template - void linkAndCopy(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT + template + void linkAndCopy( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - static_cast(*this) = static_cast(rhs); + static_assert( extendCheck::valid, "The structure chain is not valid!" ); + static_cast( *this ) = static_cast( rhs ); } - template - void linkAndCopy(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT + template + void linkAndCopy( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - X& x = static_cast(*this); - Y& y = static_cast(*this); - x = static_cast(rhs); + static_assert( extendCheck::valid, "The structure chain is not valid!" ); + X & x = static_cast( *this ); + Y & y = static_cast( *this ); + x = static_cast( rhs ); x.pNext = &y; - linkAndCopy, Y, Z...>(rhs); + linkAndCopy, Y, Z...>( rhs ); } - template - void linkAndCopyElements(X const &xelem) VULKAN_HPP_NOEXCEPT + template + void linkAndCopyElements( X const & xelem ) VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - static_cast(*this) = xelem; + static_assert( extendCheck::valid, "The structure chain is not valid!" ); + static_cast( *this ) = xelem; } - template - void linkAndCopyElements(X const &xelem, Y const &yelem, Z const &... zelem) VULKAN_HPP_NOEXCEPT + template + void linkAndCopyElements( X const & xelem, Y const & yelem, Z const &... zelem ) VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - X& x = static_cast(*this); - Y& y = static_cast(*this); - x = xelem; + static_assert( extendCheck::valid, "The structure chain is not valid!" ); + X & x = static_cast( *this ); + Y & y = static_cast( *this ); + x = xelem; x.pNext = &y; - linkAndCopyElements, Y, Z...>(yelem, zelem...); + linkAndCopyElements, Y, Z...>( yelem, zelem... 
); } }; -#if !defined(VULKAN_HPP_NO_SMART_HANDLE) - template class UniqueHandleTraits; +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + template + class UniqueHandleTraits; template - class UniqueHandle : public UniqueHandleTraits::deleter + class UniqueHandle : public UniqueHandleTraits::deleter { private: - using Deleter = typename UniqueHandleTraits::deleter; + using Deleter = typename UniqueHandleTraits::deleter; public: using element_type = Type; - UniqueHandle() - : Deleter() - , m_value() - {} + UniqueHandle() : Deleter(), m_value() {} - explicit UniqueHandle( Type const& value, Deleter const& deleter = Deleter() ) VULKAN_HPP_NOEXCEPT - : Deleter( deleter) + explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT + : Deleter( deleter ) , m_value( value ) {} - UniqueHandle( UniqueHandle const& ) = delete; + UniqueHandle( UniqueHandle const & ) = delete; UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT - : Deleter( std::move( static_cast( other ) ) ) + : Deleter( std::move( static_cast( other ) ) ) , m_value( other.release() ) {} ~UniqueHandle() VULKAN_HPP_NOEXCEPT { - if ( m_value ) this->destroy( m_value ); + if ( m_value ) + this->destroy( m_value ); } - UniqueHandle & operator=( UniqueHandle const& ) = delete; + UniqueHandle & operator=( UniqueHandle const & ) = delete; UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT { reset( other.release() ); - *static_cast(this) = std::move( static_cast(other) ); + *static_cast( this ) = std::move( static_cast( other ) ); return *this; } @@ -814,7 +852,7 @@ namespace VULKAN_HPP_NAMESPACE return m_value.operator bool(); } - Type const* operator->() const VULKAN_HPP_NOEXCEPT + Type const * operator->() const VULKAN_HPP_NOEXCEPT { return &m_value; } @@ -824,7 +862,7 @@ namespace VULKAN_HPP_NAMESPACE return &m_value; } - Type const& operator*() const VULKAN_HPP_NOEXCEPT + Type const & operator*() const VULKAN_HPP_NOEXCEPT { return m_value; } @@ -844,11 +882,12 @@ namespace VULKAN_HPP_NAMESPACE return m_value; } - void reset( Type const& value = Type() ) VULKAN_HPP_NOEXCEPT + void reset( Type const & value = Type() ) VULKAN_HPP_NOEXCEPT { if ( m_value != value ) { - if ( m_value ) this->destroy( m_value ); + if ( m_value ) + this->destroy( m_value ); m_value = value; } } @@ -856,230 +895,372 @@ namespace VULKAN_HPP_NAMESPACE Type release() VULKAN_HPP_NOEXCEPT { Type value = m_value; - m_value = nullptr; + m_value = nullptr; return value; } - void swap( UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT + void swap( UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT { - std::swap(m_value, rhs.m_value); - std::swap(static_cast(*this), static_cast(rhs)); + std::swap( m_value, rhs.m_value ); + std::swap( static_cast( *this ), static_cast( rhs ) ); } private: - Type m_value; + Type m_value; }; template - VULKAN_HPP_INLINE std::vector uniqueToRaw(std::vector const& handles) + VULKAN_HPP_INLINE std::vector + uniqueToRaw( std::vector const & handles ) { - std::vector newBuffer(handles.size()); - std::transform(handles.begin(), handles.end(), newBuffer.begin(), [](UniqueType const& handle) { return handle.get(); }); + std::vector newBuffer( handles.size() ); + std::transform( + handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } ); return newBuffer; } template - VULKAN_HPP_INLINE void swap( UniqueHandle & lhs, UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void swap( UniqueHandle & lhs, + UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT { 
lhs.swap( rhs ); } #endif -#if !defined(VK_NO_PROTOTYPES) +#if !defined( VK_NO_PROTOTYPES ) class DispatchLoaderStatic { public: - VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); } - VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); } - VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); } - VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT { return ::vkEnumerateInstanceVersion( pApiVersion ); } - VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); } - void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT + void vkCmdBeginConditionalRenderingEXT( + VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); } - void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); } - void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT + void vkCmdBeginQuery( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); } - void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const VULKAN_HPP_NOEXCEPT + void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags, + uint32_t index ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); } - void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const 
VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); } - void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); } - void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); } - void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + return ::vkCmdBeginTransformFeedbackEXT( + commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); } - void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT + void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); + return ::vkCmdBindDescriptorSets( commandBuffer, + pipelineBindPoint, + layout, + firstSet, + descriptorSetCount, + pDescriptorSets, + dynamicOffsetCount, + pDynamicOffsets ); } - void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT + void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); } - void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + void vkCmdBindPipeline( 
VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); } - void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); } - void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); } - void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes ) const VULKAN_HPP_NOEXCEPT + void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); + return ::vkCmdBindTransformFeedbackBuffersEXT( + commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); } - void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets ) const VULKAN_HPP_NOEXCEPT + void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); } - void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter ) const VULKAN_HPP_NOEXCEPT + void vkCmdBlitImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit * pRegions, + VkFilter filter ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + return ::vkCmdBlitImage( + commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdBuildAccelerationStructureIndirectKHR( VkCommandBuffer commandBuffer, const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, VkBuffer indirectBuffer, VkDeviceSize indirectOffset, uint32_t indirectStride ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdBuildAccelerationStructureIndirectKHR( VkCommandBuffer commandBuffer, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfo, + VkBuffer indirectBuffer, + VkDeviceSize indirectOffset, + uint32_t 
indirectStride ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBuildAccelerationStructureIndirectKHR( commandBuffer, pInfo, indirectBuffer, indirectOffset, indirectStride ); + return ::vkCmdBuildAccelerationStructureIndirectKHR( + commandBuffer, pInfo, indirectBuffer, indirectOffset, indirectStride ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdBuildAccelerationStructureKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdBuildAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildOffsetInfoKHR * const * ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBuildAccelerationStructureKHR( commandBuffer, infoCount, pInfos, ppOffsetInfos ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV * pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureKHR dst, + VkAccelerationStructureKHR src, + VkBuffer scratch, + VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); + return ::vkCmdBuildAccelerationStructureNV( + commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); } - void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects ) const VULKAN_HPP_NOEXCEPT + void vkCmdClearAttachments( VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment * pAttachments, + uint32_t rectCount, + const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); } - void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const VULKAN_HPP_NOEXCEPT + void vkCmdClearColorImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue * pColor, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); } - void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const VULKAN_HPP_NOEXCEPT + void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue 
* pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, + VkAccelerationStructureKHR dst, + VkAccelerationStructureKHR src, + VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const VULKAN_HPP_NOEXCEPT + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferCopy * pRegions ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); } - void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); } - void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT + void vkCmdCopyImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + return ::vkCmdCopyImage( + commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } - void vkCmdCopyImageToBuffer( 
VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT + void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); + return ::vkCmdCopyQueryPoolResults( + commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); } - void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); } @@ -1089,97 +1270,196 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDebugMarkerEndEXT( commandBuffer ); } - void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); } - void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + void vkCmdDispatch( VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); } - void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + void vkCmdDispatchBase( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, 
baseGroupZ, groupCountX, groupCountY, groupCountZ ); + return ::vkCmdDispatchBase( + commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } - void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + return ::vkCmdDispatchBaseKHR( + commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } - void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); } - void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + void vkCmdDraw( VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); } - void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); } - void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); } - void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + return ::vkCmdDrawIndexedIndirectCount( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT 
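    // The DispatchLoaderStatic wrappers above and below only forward to the
    // statically linked Vulkan entry points; clang-format merely re-wraps their
    // parameter lists, so behaviour is unchanged. As a minimal usage sketch
    // (assuming a valid vk::Instance named `instance`), the generated C++ API
    // accepts such a dispatcher as its defaulted last argument:
    //
    //   vk::DispatchLoaderStatic dispatch;
    //   std::vector<vk::PhysicalDevice> gpus = instance.enumeratePhysicalDevices( dispatch );
    //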
+ void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + return ::vkCmdDrawIndexedIndirectCountAMD( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + return ::vkCmdDrawIndexedIndirectCountKHR( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); } - void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); + return ::vkCmdDrawIndirectByteCountEXT( + commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); } - void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + return ::vkCmdDrawIndirectCount( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + 
VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + return ::vkCmdDrawIndirectCountAMD( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + return ::vkCmdDrawIndirectCountKHR( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + return ::vkCmdDrawMeshTasksIndirectCountNV( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); } - void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, + uint32_t taskCount, + uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); } @@ -1199,7 +1479,10 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdEndQuery( commandBuffer, queryPool, query ); } - void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT + void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + uint32_t index ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); } @@ -1209,37 +1492,54 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdEndRenderPass( commandBuffer ); } - void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); } - void vkCmdEndRenderPass2KHR( 
VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); } - void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + return ::vkCmdEndTransformFeedbackEXT( + commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); } - void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, + uint32_t commandBufferCount, + const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); } - void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const + VULKAN_HPP_NOEXCEPT { return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); } - void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT + void vkCmdFillBuffer( VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); } - void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); } @@ -1249,77 +1549,139 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdNextSubpass( commandBuffer, contents ); } - void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); } - void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) 
const VULKAN_HPP_NOEXCEPT { return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); } - void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers ); + return ::vkCmdPipelineBarrier( commandBuffer, + srcStageMask, + dstStageMask, + dependencyFlags, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); } - void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const + VULKAN_HPP_NOEXCEPT { return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); } - void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const VULKAN_HPP_NOEXCEPT + void vkCmdPushConstants( VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); } - void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT + void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); + return ::vkCmdPushDescriptorSetKHR( + commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); } - void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData ) const VULKAN_HPP_NOEXCEPT + void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void * 
pData ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); } - void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + void vkCmdResetEvent( VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdResetEvent( commandBuffer, event, stageMask ); } - void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); } - void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions ) const VULKAN_HPP_NOEXCEPT + void vkCmdResolveImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + return ::vkCmdResolveImage( + commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } - void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, + const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); } - void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void* pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); } - void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT + void + vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + return ::vkCmdSetCoarseSampleOrderNV( + commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); } - void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); } - void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetDepthBounds( VkCommandBuffer 
commandBuffer, + float minDepthBounds, + float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); } @@ -1334,22 +1696,34 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); } - void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); + return ::vkCmdSetDiscardRectangleEXT( + commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); } - void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetEvent( VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetEvent( commandBuffer, event, stageMask ); } - void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); + return ::vkCmdSetExclusiveScissorNV( + commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); } - void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); } @@ -1359,106 +1733,229 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); } - VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + VkResult + vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); } - VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo ) const VULKAN_HPP_NOEXCEPT + VkResult + vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); } - VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkCmdSetPerformanceStreamMarkerINTEL( + VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT { return 
::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); } - void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); } - void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetScissor( VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); } - void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); } - void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); } - void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); } - void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetViewport( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); } - void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV * pShadingRatePalettes ) const + VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); + return ::vkCmdSetViewportShadingRatePaletteNV( + commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); } - void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - 
void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, + const VkStridedBufferRegionKHR * pRaygenShaderBindingTable, + const VkStridedBufferRegionKHR * pMissShaderBindingTable, + const VkStridedBufferRegionKHR * pHitShaderBindingTable, + const VkStridedBufferRegionKHR * pCallableShaderBindingTable, + VkBuffer buffer, + VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdTraceRaysIndirectKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, buffer, offset ); + return ::vkCmdTraceRaysIndirectKHR( commandBuffer, + pRaygenShaderBindingTable, + pMissShaderBindingTable, + pHitShaderBindingTable, + pCallableShaderBindingTable, + buffer, + offset ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, + const VkStridedBufferRegionKHR * pRaygenShaderBindingTable, + const VkStridedBufferRegionKHR * pMissShaderBindingTable, + const VkStridedBufferRegionKHR * pHitShaderBindingTable, + const VkStridedBufferRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdTraceRaysKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); + return ::vkCmdTraceRaysKHR( commandBuffer, + pRaygenShaderBindingTable, + pMissShaderBindingTable, + pHitShaderBindingTable, + pCallableShaderBindingTable, + width, + height, + depth ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT + void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize 
callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth ); + return ::vkCmdTraceRaysNV( commandBuffer, + raygenShaderBindingTableBuffer, + raygenShaderBindingOffset, + missShaderBindingTableBuffer, + missShaderBindingOffset, + missShaderBindingStride, + hitShaderBindingTableBuffer, + hitShaderBindingOffset, + hitShaderBindingStride, + callableShaderBindingTableBuffer, + callableShaderBindingOffset, + callableShaderBindingStride, + width, + height, + depth ); } - void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData ) const VULKAN_HPP_NOEXCEPT + void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); } - void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + void vkCmdWaitEvents( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers ); + return ::vkCmdWaitEvents( commandBuffer, + eventCount, + pEvents, + srcStageMask, + dstStageMask, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); } - void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdWriteAccelerationStructuresPropertiesKHR( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + return ::vkCmdWriteAccelerationStructuresPropertiesKHR( + commandBuffer, accelerationStructureCount, 
pAccelerationStructures, queryType, queryPool, firstQuery ); } - void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + return ::vkCmdWriteAccelerationStructuresPropertiesNV( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); } - void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT + void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); } - void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); } @@ -1468,462 +1965,685 @@ namespace VULKAN_HPP_NAMESPACE return ::vkEndCommandBuffer( commandBuffer ); } - VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT + VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, + VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkResetCommandBuffer( commandBuffer, flags ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR +# ifdef VK_USE_PLATFORM_WIN32_KHR VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT { return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT + VkResult vkAcquireNextImage2KHR( VkDevice device, + const VkAcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT { return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); } - VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT + VkResult vkAcquireNextImageKHR( VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT { return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); } - VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const 
VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration ) const VULKAN_HPP_NOEXCEPT + VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VkPerformanceConfigurationINTEL * pConfiguration ) const + VULKAN_HPP_NOEXCEPT { return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); } - VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkAcquireProfilingLockKHR( VkDevice device, + const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkAcquireProfilingLockKHR( device, pInfo ); } - VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + VkResult vkAllocateCommandBuffers( VkDevice device, + const VkCommandBufferAllocateInfo * pAllocateInfo, + VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT { return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); } - VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + VkResult vkAllocateDescriptorSets( VkDevice device, + const VkDescriptorSetAllocateInfo * pAllocateInfo, + VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT { return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); } - VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const VULKAN_HPP_NOEXCEPT + VkResult vkAllocateMemory( VkDevice device, + const VkMemoryAllocateInfo * pAllocateInfo, + const VkAllocationCallbacks * pAllocator, + VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT { return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); } - VkResult vkBindAccelerationStructureMemoryKHR( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos ) const VULKAN_HPP_NOEXCEPT + VkResult vkBindAccelerationStructureMemoryKHR( VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoKHR * pBindInfos ) const + VULKAN_HPP_NOEXCEPT { return ::vkBindAccelerationStructureMemoryKHR( device, bindInfoCount, pBindInfos ); } - VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos ) const VULKAN_HPP_NOEXCEPT + VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoKHR * pBindInfos ) const + VULKAN_HPP_NOEXCEPT { return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); } - VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + VkResult vkBindBufferMemory( VkDevice device, + VkBuffer buffer, + VkDeviceMemory memory, + VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT { return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); } - VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT + VkResult vkBindBufferMemory2( VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo * 
pBindInfos ) const VULKAN_HPP_NOEXCEPT { return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); } - VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT + VkResult vkBindBufferMemory2KHR( VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT { return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); } - VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + VkResult vkBindImageMemory( VkDevice device, + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT { return ::vkBindImageMemory( device, image, memory, memoryOffset ); } - VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT + VkResult vkBindImageMemory2( VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT { return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); } - VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT + VkResult vkBindImageMemory2KHR( VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT { return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkBuildAccelerationStructureKHR( VkDevice device, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkBuildAccelerationStructureKHR( + VkDevice device, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildOffsetInfoKHR * const * ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT { return ::vkBuildAccelerationStructureKHR( device, infoCount, pInfos, ppOffsetInfos ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT { return ::vkCompileDeferredNV( device, pipeline, shader ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkCopyAccelerationStructureKHR( VkDevice device, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult + vkCopyAccelerationStructureKHR( VkDevice device, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCopyAccelerationStructureKHR( device, pInfo ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCopyAccelerationStructureToMemoryKHR( + VkDevice device, const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCopyAccelerationStructureToMemoryKHR( device, pInfo ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice 
device, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCopyMemoryToAccelerationStructureKHR( + VkDevice device, const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCopyMemoryToAccelerationStructureKHR( device, pInfo ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult + vkCreateAccelerationStructureKHR( VkDevice device, + const VkAccelerationStructureCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + VkResult + vkCreateAccelerationStructureNV( VkDevice device, + const VkAccelerationStructureCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); } - VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateBuffer( VkDevice device, + const VkBufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); } - VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateBufferView( VkDevice device, + const VkBufferViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); } - VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateCommandPool( VkDevice device, + const VkCommandPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); } - VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateComputePipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * 
pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkCreateDeferredOperationKHR( VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCreateDeferredOperationKHR( VkDevice device, + const VkAllocationCallbacks * pAllocator, + VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateDescriptorPool( VkDevice device, + const VkDescriptorPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); } - VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateDescriptorSetLayout( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); } - VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const + VULKAN_HPP_NOEXCEPT { return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); } - VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const + VULKAN_HPP_NOEXCEPT { return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); } - VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateEvent( VkDevice device, + const VkEventCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); } - VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) 
const VULKAN_HPP_NOEXCEPT + VkResult vkCreateFence( VkDevice device, + const VkFenceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); } - VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateFramebuffer( VkDevice device, + const VkFramebufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); } - VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateGraphicsPipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + return ::vkCreateGraphicsPipelines( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } - VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateImage( VkDevice device, + const VkImageCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImage * pImage ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); } - VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateImageView( VkDevice device, + const VkImageViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImageView * pView ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); } - VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + VkResult + vkCreateIndirectCommandsLayoutNV( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); } - VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreatePipelineCache( VkDevice device, + const VkPipelineCacheCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT { return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); } - VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const 
VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreatePipelineLayout( VkDevice device, + const VkPipelineLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT { return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); } - VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateQueryPool( VkDevice device, + const VkQueryPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateRayTracingPipelinesKHR( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + return ::vkCreateRayTracingPipelinesKHR( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateRayTracingPipelinesNV( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + return ::vkCreateRayTracingPipelinesNV( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } - VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateRenderPass( VkDevice device, + const VkRenderPassCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); } - VkResult vkCreateRenderPass2( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateRenderPass2( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); } - VkResult vkCreateRenderPass2KHR( VkDevice 
device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateRenderPass2KHR( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); } - VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateSampler( VkDevice device, + const VkSamplerCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); } - VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateSamplerYcbcrConversion( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); } - VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); } - VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateSemaphore( VkDevice device, + const VkSemaphoreCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); } - VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateShaderModule( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); } - VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateSharedSwapchainsKHR( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); } - VkResult vkCreateSwapchainKHR( VkDevice device, const 
VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateSwapchainKHR( VkDevice device, + const VkSwapchainCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); } - VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateValidationCacheEXT( VkDevice device, + const VkValidationCacheCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); } - VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, + const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); } - VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, + const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS +# ifdef VK_ENABLE_BETA_EXTENSIONS VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT { return ::vkDeferredOperationJoinKHR( device, operation ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - void vkDestroyAccelerationStructureKHR( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyAccelerationStructureKHR( VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); } - void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyAccelerationStructureNV( VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); } - void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyBuffer( VkDevice device, + VkBuffer buffer, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyBuffer( device, buffer, pAllocator ); } - void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyBufferView( VkDevice device, + VkBufferView bufferView, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyBufferView( device, bufferView, pAllocator ); } - void vkDestroyCommandPool( VkDevice device, 
VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyCommandPool( VkDevice device, + VkCommandPool commandPool, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyCommandPool( device, commandPool, pAllocator ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + void vkDestroyDeferredOperationKHR( VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyDescriptorPool( VkDevice device, + VkDescriptorPool descriptorPool, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); } - void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyDescriptorSetLayout( VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); } - void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyDescriptorUpdateTemplate( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); } - void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); } - void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyDevice( device, pAllocator ); } - void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyEvent( VkDevice device, + VkEvent event, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyEvent( device, event, pAllocator ); } - void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyFence( VkDevice device, + VkFence fence, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyFence( device, fence, pAllocator ); } - void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const 
VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyFramebuffer( VkDevice device, + VkFramebuffer framebuffer, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); } - void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyImage( VkDevice device, + VkImage image, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyImage( device, image, pAllocator ); } - void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyImageView( VkDevice device, + VkImageView imageView, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyImageView( device, imageView, pAllocator ); } - void vkDestroyIndirectCommandsLayoutNV( VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyIndirectCommandsLayoutNV( VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); } - void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyPipeline( VkDevice device, + VkPipeline pipeline, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyPipeline( device, pipeline, pAllocator ); } - void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyPipelineCache( VkDevice device, + VkPipelineCache pipelineCache, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); } - void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyPipelineLayout( VkDevice device, + VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); } - void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyQueryPool( VkDevice device, + VkQueryPool queryPool, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyQueryPool( device, queryPool, pAllocator ); } - void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyRenderPass( VkDevice device, + VkRenderPass renderPass, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyRenderPass( device, renderPass, pAllocator ); } - void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroySampler( VkDevice device, + VkSampler sampler, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroySampler( device, sampler, pAllocator ); } - void vkDestroySamplerYcbcrConversion( VkDevice device, 
VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroySamplerYcbcrConversion( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); } - void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroySamplerYcbcrConversionKHR( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); } - void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroySemaphore( VkDevice device, + VkSemaphore semaphore, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroySemaphore( device, semaphore, pAllocator ); } - void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyShaderModule( VkDevice device, + VkShaderModule shaderModule, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); } - void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroySwapchainKHR( VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); } - void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyValidationCacheEXT( VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); } @@ -1933,191 +2653,263 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDeviceWaitIdle( device ); } - VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkDisplayPowerControlEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); } - VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + VkResult vkFlushMappedMemoryRanges( VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT { return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); } - void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + void vkFreeCommandBuffers( VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT { return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, 
pCommandBuffers ); } - VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + VkResult vkFreeDescriptorSets( VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT { return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); } - void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkFreeMemory( VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkFreeMemory( device, memory, pAllocator ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( + VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetAccelerationStructureHandleNV( VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkGetAccelerationStructureMemoryRequirementsKHR( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + void vkGetAccelerationStructureMemoryRequirementsKHR( + VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoKHR * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { return ::vkGetAccelerationStructureMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2KHR * pMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); } -#ifdef VK_USE_PLATFORM_ANDROID_KHR - VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_ANDROID_KHR + VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, + const struct AHardwareBuffer * buffer, + VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); } 
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT + VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferDeviceAddress( device, pInfo ); } - VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT + VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferDeviceAddressEXT( device, pInfo ); } - VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT + VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferDeviceAddressKHR( device, pInfo ); } - void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + void vkGetBufferMemoryRequirements( VkDevice device, + VkBuffer buffer, + VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); } - void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + void vkGetBufferMemoryRequirements2( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); } - void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + void vkGetBufferMemoryRequirements2KHR( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); } - uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT + uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); } - uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT + uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); } - VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetCalibratedTimestampsEXT( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoEXT * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT { return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - uint32_t 
vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, + VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkGetDeferredOperationResultKHR( VkDevice device, + VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeferredOperationResultKHR( device, operation ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT + void vkGetDescriptorSetLayoutSupport( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); } - void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT + void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, const VkAccelerationStructureVersionKHR* version ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkGetDeviceAccelerationStructureCompatibilityKHR( + VkDevice device, const VkAccelerationStructureVersionKHR * version ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, version ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + return ::vkGetDeviceGroupPeerMemoryFeatures( + device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); } - void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + void + vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, 
pPeerMemoryFeatures ); + return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( + device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); } - VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetDeviceGroupPresentCapabilitiesKHR( + VkDevice device, VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult + vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetDeviceGroupSurfacePresentModesKHR( + VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); } - void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT + void vkGetDeviceMemoryCommitment( VkDevice device, + VkDeviceMemory memory, + VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); } - uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT + uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( + VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); } - uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT + uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( + VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); } - PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char* pName ) const VULKAN_HPP_NOEXCEPT + PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceProcAddr( device, pName ); } - void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue ) const VULKAN_HPP_NOEXCEPT + void vkGetDeviceQueue( VkDevice device, + uint32_t queueFamilyIndex, + uint32_t queueIndex, + VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); } - void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue ) const VULKAN_HPP_NOEXCEPT + void vkGetDeviceQueue2( VkDevice device, + const VkDeviceQueueInfo2 * pQueueInfo, + 
VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); } @@ -2127,7 +2919,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetEventStatus( device, event ); } - VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT + VkResult + vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT { return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); } @@ -2137,206 +2930,326 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetFenceStatus( device, fence ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetFenceWin32HandleKHR( VkDevice device, + const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT { return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); } - VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetImageDrmFormatModifierPropertiesEXT( + VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); } - void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + void vkGetImageMemoryRequirements( VkDevice device, + VkImage image, + VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); } - void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + void vkGetImageMemoryRequirements2( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); } - void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + void vkGetImageMemoryRequirements2KHR( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); } - void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + void vkGetImageSparseMemoryRequirements( VkDevice 
device, + VkImage image, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + return ::vkGetImageSparseMemoryRequirements( + device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); } - void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + void vkGetImageSparseMemoryRequirements2( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + return ::vkGetImageSparseMemoryRequirements2( + device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); } - void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + return ::vkGetImageSparseMemoryRequirements2KHR( + device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); } - void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout ) const VULKAN_HPP_NOEXCEPT + void vkGetImageSubresourceLayout( VkDevice device, + VkImage image, + const VkImageSubresource * pSubresource, + VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); } - VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetImageViewAddressNVX( VkDevice device, + VkImageView imageView, + VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); } - uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX* pInfo ) const VULKAN_HPP_NOEXCEPT + uint32_t vkGetImageViewHandleNVX( VkDevice device, + const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageViewHandleNVX( device, pInfo ); } -#ifdef VK_USE_PLATFORM_ANDROID_KHR - VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_ANDROID_KHR + VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT { return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); } -#endif 
/*VK_USE_PLATFORM_ANDROID_KHR*/ +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT + VkResult + vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT { return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); } - VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); } - VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetMemoryWin32HandleKHR( VkDevice device, + const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT { return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetMemoryWin32HandleNV( VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT { return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetMemoryWin32HandlePropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * 
pPresentationTimingCount, + VkPastPresentationTimingGOOGLE * pPresentationTimings ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); } - VkResult vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPerformanceParameterINTEL( VkDevice device, + VkPerformanceParameterTypeINTEL parameter, + VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); } - VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPipelineCacheData( VkDevice device, + VkPipelineCache pipelineCache, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); } - VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPipelineExecutableInternalRepresentationsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); + return ::vkGetPipelineExecutableInternalRepresentationsKHR( + device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); } - VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult + vkGetPipelineExecutablePropertiesKHR( VkDevice device, + const VkPipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); } - VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics ) const VULKAN_HPP_NOEXCEPT + VkResult + vkGetPipelineExecutableStatisticsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); } - VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetQueryPoolResults( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - 
VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + device, pipeline, firstGroup, groupCount, dataSize, pData ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); } - VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); } - VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); } - void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const VULKAN_HPP_NOEXCEPT + void vkGetRenderAreaGranularity( VkDevice device, + VkRenderPass renderPass, + VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT { return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); } - VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT + VkResult + vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT { return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); } - VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, + VkSemaphore semaphore, + uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT { return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); } - VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetSemaphoreFdKHR( VkDevice device, + const VkSemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd ) const VULKAN_HPP_NOEXCEPT { return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - 
VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT { return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetShaderInfoAMD( VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); } - VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetSwapchainCounterEXT( VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT { return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); } - VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetSwapchainImagesKHR( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT { return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); } @@ -2346,78 +3259,110 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetSwapchainStatusKHR( device, swapchain ); } - VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetValidationCacheDataEXT( VkDevice device, + VkValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); } - VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkImportFenceFdKHR( VkDevice device, + const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkImportFenceWin32HandleKHR( + VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT + VkResult + vkImportSemaphoreFdKHR( VkDevice device, + const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT { return 
::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkImportSemaphoreWin32HandleKHR( + VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkInitializePerformanceApiINTEL( + VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); } - VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + VkResult vkInvalidateMappedMemoryRanges( VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT { return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); } - VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const VULKAN_HPP_NOEXCEPT + VkResult vkMapMemory( VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void ** ppData ) const VULKAN_HPP_NOEXCEPT { return ::vkMapMemory( device, memory, offset, size, flags, ppData ); } - VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const VULKAN_HPP_NOEXCEPT + VkResult vkMergePipelineCaches( VkDevice device, + VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT { return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); } - VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const VULKAN_HPP_NOEXCEPT + VkResult vkMergeValidationCachesEXT( VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT { return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); } - VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT + VkResult vkRegisterDeviceEventEXT( VkDevice device, + const VkDeviceEventInfoEXT * pDeviceEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT { return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); } - VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT + VkResult vkRegisterDisplayEventEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT * pDisplayEventInfo, + const VkAllocationCallbacks * pAllocator, + 
VkFence * pFence ) const VULKAN_HPP_NOEXCEPT { return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR +# ifdef VK_USE_PLATFORM_WIN32_KHR VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT { return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + VkResult + vkReleasePerformanceConfigurationINTEL( VkDevice device, + VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT { return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); } @@ -2427,12 +3372,16 @@ namespace VULKAN_HPP_NAMESPACE return ::vkReleaseProfilingLockKHR( device ); } - VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + VkResult vkResetCommandPool( VkDevice device, + VkCommandPool commandPool, + VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkResetCommandPool( device, commandPool, flags ); } - VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + VkResult vkResetDescriptorPool( VkDevice device, + VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkResetDescriptorPool( device, descriptorPool, flags ); } @@ -2442,27 +3391,35 @@ namespace VULKAN_HPP_NAMESPACE return ::vkResetEvent( device, event ); } - VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const VULKAN_HPP_NOEXCEPT + VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT { return ::vkResetFences( device, fenceCount, pFences ); } - void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + void vkResetQueryPool( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT { return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); } - void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + void vkResetQueryPoolEXT( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT { return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); } - VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, + const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); } - VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, + const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); } @@ -2472,32 +3429,42 @@ namespace VULKAN_HPP_NAMESPACE return ::vkSetEvent( device, event ); } - void vkSetHdrMetadataEXT( VkDevice device, uint32_t 
swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const VULKAN_HPP_NOEXCEPT + void vkSetHdrMetadataEXT( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR * pSwapchains, + const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT { return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); } - void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT + void vkSetLocalDimmingAMD( VkDevice device, + VkSwapchainKHR swapChain, + VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); } - VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkSignalSemaphore( device, pSignalInfo ); } - VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkSignalSemaphoreKHR( VkDevice device, + const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkSignalSemaphoreKHR( device, pSignalInfo ); } - void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + void vkTrimCommandPool( VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkTrimCommandPool( device, commandPool, flags ); } - void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + void vkTrimCommandPoolKHR( VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkTrimCommandPoolKHR( device, commandPool, flags ); } @@ -2512,535 +3479,837 @@ namespace VULKAN_HPP_NAMESPACE return ::vkUnmapMemory( device, memory ); } - void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT + void vkUpdateDescriptorSetWithTemplate( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); } - void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT + void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); } - void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT + void vkUpdateDescriptorSets( VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT { - return ::vkUpdateDescriptorSets( device, 
descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); + return ::vkUpdateDescriptorSets( + device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); } - VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + VkResult vkWaitForFences( VkDevice device, + uint32_t fenceCount, + const VkFence * pFences, + VkBool32 waitAll, + uint64_t timeout ) const VULKAN_HPP_NOEXCEPT { return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); } - VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + VkResult vkWaitSemaphores( VkDevice device, + const VkSemaphoreWaitInfo * pWaitInfo, + uint64_t timeout ) const VULKAN_HPP_NOEXCEPT { return ::vkWaitSemaphores( device, pWaitInfo, timeout ); } - VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + VkResult vkWaitSemaphoresKHR( VkDevice device, + const VkSemaphoreWaitInfo * pWaitInfo, + uint64_t timeout ) const VULKAN_HPP_NOEXCEPT { return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); + return ::vkWriteAccelerationStructuresPropertiesKHR( + device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_ANDROID_KHR + VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); } - VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const 
VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); } - VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } - VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); } -#ifdef VK_USE_PLATFORM_IOS_MVK - VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_IOS_MVK + VkResult vkCreateIOSSurfaceMVK( VkInstance instance, + const VkIOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_IOS_MVK*/ +# endif /*VK_USE_PLATFORM_IOS_MVK*/ -#ifdef VK_USE_PLATFORM_FUCHSIA - VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_FUCHSIA + VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_FUCHSIA*/ +# endif /*VK_USE_PLATFORM_FUCHSIA*/ -#ifdef VK_USE_PLATFORM_MACOS_MVK - VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_MACOS_MVK + VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ -#ifdef VK_USE_PLATFORM_METAL_EXT - VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, 
const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_METAL_EXT + VkResult vkCreateMetalSurfaceEXT( VkInstance instance, + const VkMetalSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_METAL_EXT*/ +# endif /*VK_USE_PLATFORM_METAL_EXT*/ -#ifdef VK_USE_PLATFORM_GGP - VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_GGP + VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, + const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_GGP*/ +# endif /*VK_USE_PLATFORM_GGP*/ -#ifdef VK_USE_PLATFORM_VI_NN - VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_VI_NN + VkResult vkCreateViSurfaceNN( VkInstance instance, + const VkViSurfaceCreateInfoNN * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_VI_NN*/ +# endif /*VK_USE_PLATFORM_VI_NN*/ -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WAYLAND_KHR + VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkCreateWin32SurfaceKHR( VkInstance instance, + const VkWin32SurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR - VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_XCB_KHR + VkResult vkCreateXcbSurfaceKHR( VkInstance instance, + const VkXcbSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateXcbSurfaceKHR( instance, 
pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_XCB_KHR*/ +# endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_XLIB_KHR + VkResult vkCreateXlibSurfaceKHR( VkInstance instance, + const VkXlibSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ - void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const VULKAN_HPP_NOEXCEPT + void vkDebugReportMessageEXT( VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage ) const VULKAN_HPP_NOEXCEPT { - return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); + return ::vkDebugReportMessageEXT( + instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); } - void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyDebugReportCallbackEXT( VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); } - void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); } - void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyInstance( instance, pAllocator ); } - void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroySurfaceKHR( VkInstance instance, + VkSurfaceKHR surface, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); } - VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const + VULKAN_HPP_NOEXCEPT { return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); } - VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, 
uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + return ::vkEnumeratePhysicalDeviceGroupsKHR( + instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); } - VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumeratePhysicalDevices( VkInstance instance, + uint32_t * pPhysicalDeviceCount, + VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT { return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); } - PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const VULKAN_HPP_NOEXCEPT + PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT { return ::vkGetInstanceProcAddr( instance, pName ); } - void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const VULKAN_HPP_NOEXCEPT + void vkSubmitDebugUtilsMessageEXT( VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const + VULKAN_HPP_NOEXCEPT { return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); } -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT + VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, + Display * dpy, + VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT { return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); } -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); } - VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); } - VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, 
VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, + const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); } - VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); } - VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VkPerformanceCounterKHR * pCounters, + VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT { - return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); + return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); } - VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); } - VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); } - VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT + VkResult + vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); } - VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + 
VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); } - VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t * pDisplayCount, + VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); } - VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainEXT * pTimeDomains ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); } - VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesNV * pProperties ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); } - VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlaneProperties2KHR * pProperties ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); } - VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlanePropertiesKHR * pProperties ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); } - VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); } - VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); } - void 
vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + return ::vkGetPhysicalDeviceExternalBufferProperties( + physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); } - void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( + physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); } - void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + return ::vkGetPhysicalDeviceExternalFenceProperties( + physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); } - void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( + physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); } - VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) 
const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); + return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); } - void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceExternalSemaphoreProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + return ::vkGetPhysicalDeviceExternalSemaphoreProperties( + physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); } - void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); } - void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); } - void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); } - void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); } - void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); } - void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT + void 
vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); } - void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); } - VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties * pImageFormatProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); + return ::vkGetPhysicalDeviceImageFormatProperties( + physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); } - VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); } - VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); } - void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); } - void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceMemoryProperties2( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); } - void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, 
VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceMemoryProperties2KHR( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); } - void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT * pMultisampleProperties ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); } - VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pRectCount, + VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); } - void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); } - void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); } - void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); } - void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); + return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); } - void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties * pQueueFamilyProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceQueueFamilyProperties( 
physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + return ::vkGetPhysicalDeviceQueueFamilyProperties( + physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); } - void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + return ::vkGetPhysicalDeviceQueueFamilyProperties2( + physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); } - void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( + physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); } - void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties * pProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); + return ::vkGetPhysicalDeviceSparseImageFormatProperties( + physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); } - void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + return ::vkGetPhysicalDeviceSparseImageFormatProperties2( + physicalDevice, pFormatInfo, pPropertyCount, pProperties ); } - void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const 
VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + physicalDevice, pFormatInfo, pPropertyCount, pProperties ); } - VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, + uint32_t * pCombinationCount, + VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); + return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + physicalDevice, pCombinationCount, pCombinations ); } - VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); } - VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); } - VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); } - VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); + return ::vkGetPhysicalDeviceSurfaceFormats2KHR( + physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); } - VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats ) const 
VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); + return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( + physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); } - VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); } - VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolPropertiesEXT * pToolProperties ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); } -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WAYLAND_KHR + VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display * display ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); } -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_WIN32_KHR + VkBool32 
vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR - VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_XCB_KHR + VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); } -#endif /*VK_USE_PLATFORM_XCB_KHR*/ +# endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_XLIB_KHR + VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); } -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const VULKAN_HPP_NOEXCEPT +# ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT + VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, + Display * dpy, + RROutput rrOutput, + VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT { return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); } -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT { return ::vkReleaseDisplayEXT( physicalDevice, display ); } - void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT + void vkGetQueueCheckpointDataNV( VkQueue queue, + uint32_t * pCheckpointDataCount, + VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); } - void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT + void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); } - VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT + VkResult vkQueueBindSparse( VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo * pBindInfo, + VkFence fence ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); } @@ -3050,22 +4319,28 @@ namespace VULKAN_HPP_NAMESPACE return ::vkQueueEndDebugUtilsLabelEXT( queue ); } - void vkQueueInsertDebugUtilsLabelEXT( 
VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT + void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); } - VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkQueuePresentKHR( queue, pPresentInfo ); } - VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + VkResult + vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, + VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); } - VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + VkResult vkQueueSubmit( VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo * pSubmits, + VkFence fence ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); } @@ -3078,31 +4353,35 @@ namespace VULKAN_HPP_NAMESPACE #endif class DispatchLoaderDynamic; -#if !defined(VULKAN_HPP_DISPATCH_LOADER_DYNAMIC) -# if defined(VK_NO_PROTOTYPES) -# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1 -# else -# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0 -# endif +#if !defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC ) +# if defined( VK_NO_PROTOTYPES ) +# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1 +# else +# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0 +# endif #endif -#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER) -# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 -# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic -# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE namespace VULKAN_HPP_NAMESPACE { DispatchLoaderDynamic defaultDispatchLoaderDynamic; } +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE \ + namespace VULKAN_HPP_NAMESPACE \ + { \ + DispatchLoaderDynamic defaultDispatchLoaderDynamic; \ + } extern DispatchLoaderDynamic defaultDispatchLoaderDynamic; -# else -# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic() -# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE -# endif +# else +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic() +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE +# endif #endif -#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER_TYPE) -# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 - #define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic -# else -# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic -# endif +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic +# else +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic +# endif #endif struct AllocationCallbacks; @@ -3110,136 +4389,153 @@ namespace VULKAN_HPP_NAMESPACE template class ObjectDestroy { - public: - 
ObjectDestroy() - : m_owner() - , m_allocationCallbacks( nullptr ) - , m_dispatch( nullptr ) - {} + public: + ObjectDestroy() : m_owner(), m_allocationCallbacks( nullptr ), m_dispatch( nullptr ) {} - ObjectDestroy( OwnerType owner, Optional allocationCallbacks = nullptr, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - {} + ObjectDestroy( OwnerType owner, + Optional allocationCallbacks = nullptr, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + {} - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; } - Optional getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; } + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } - protected: - template - void destroy(T t) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_owner && m_dispatch ); - m_owner.destroy( t, m_allocationCallbacks, *m_dispatch ); - } + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.destroy( t, m_allocationCallbacks, *m_dispatch ); + } - private: - OwnerType m_owner; - Optional m_allocationCallbacks; - Dispatch const* m_dispatch; + private: + OwnerType m_owner; + Optional m_allocationCallbacks; + Dispatch const * m_dispatch; }; class NoParent; template - class ObjectDestroy + class ObjectDestroy { - public: - ObjectDestroy() - : m_allocationCallbacks( nullptr ) - , m_dispatch( nullptr ) - {} + public: + ObjectDestroy() : m_allocationCallbacks( nullptr ), m_dispatch( nullptr ) {} - ObjectDestroy( Optional allocationCallbacks, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT - : m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - {} + ObjectDestroy( Optional allocationCallbacks, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + {} - Optional getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; } + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } - protected: - template - void destroy(T t) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_dispatch ); - t.destroy( m_allocationCallbacks, *m_dispatch ); - } + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_dispatch ); + t.destroy( m_allocationCallbacks, *m_dispatch ); + } - private: - Optional m_allocationCallbacks; - Dispatch const* m_dispatch; + private: + Optional m_allocationCallbacks; + Dispatch const * m_dispatch; }; template class ObjectFree { - public: - ObjectFree() - : m_owner() - , m_allocationCallbacks( nullptr ) - , m_dispatch( nullptr ) - {} + public: + ObjectFree() : m_owner(), m_allocationCallbacks( nullptr ), m_dispatch( nullptr ) {} - ObjectFree( OwnerType owner, Optional allocationCallbacks, Dispatch const &dispatch ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - {} + ObjectFree( OwnerType owner, + Optional allocationCallbacks, + Dispatch const & dispatch ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_allocationCallbacks( allocationCallbacks ) + , 
m_dispatch( &dispatch ) + {} - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; } - Optional getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; } + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } - protected: - template - void destroy(T t) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_owner && m_dispatch ); - m_owner.free( t, m_allocationCallbacks, *m_dispatch ); - } + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.free( t, m_allocationCallbacks, *m_dispatch ); + } - private: - OwnerType m_owner; - Optional m_allocationCallbacks; - Dispatch const* m_dispatch; + private: + OwnerType m_owner; + Optional m_allocationCallbacks; + Dispatch const * m_dispatch; }; template class PoolFree { - public: - PoolFree( OwnerType owner = OwnerType(), PoolType pool = PoolType(), Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_pool( pool ) - , m_dispatch( &dispatch ) - {} + public: + PoolFree( OwnerType owner = OwnerType(), + PoolType pool = PoolType(), + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_pool( pool ) + , m_dispatch( &dispatch ) + {} - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; } - PoolType getPool() const VULKAN_HPP_NOEXCEPT { return m_pool; } + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + PoolType getPool() const VULKAN_HPP_NOEXCEPT + { + return m_pool; + } - protected: - template - void destroy(T t) VULKAN_HPP_NOEXCEPT - { - m_owner.free( m_pool, t, *m_dispatch ); - } + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + m_owner.free( m_pool, t, *m_dispatch ); + } - private: - OwnerType m_owner; - PoolType m_pool; - Dispatch const* m_dispatch; + private: + OwnerType m_owner; + PoolType m_pool; + Dispatch const * m_dispatch; }; - using Bool32 = uint32_t; + using Bool32 = uint32_t; using DeviceAddress = uint64_t; - using DeviceSize = uint64_t; - using SampleMask = uint32_t; + using DeviceSize = uint64_t; + using SampleMask = uint32_t; #ifdef VK_ENABLE_BETA_EXTENSIONS enum class AccelerationStructureBuildTypeKHR { - eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR, - eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, + eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR, + eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR }; @@ -3247,9 +4543,9 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case AccelerationStructureBuildTypeKHR::eHost : return "Host"; - case AccelerationStructureBuildTypeKHR::eDevice : return "Device"; - case AccelerationStructureBuildTypeKHR::eHostOrDevice : return "HostOrDevice"; + case AccelerationStructureBuildTypeKHR::eHost: return "Host"; + case AccelerationStructureBuildTypeKHR::eDevice: return "Device"; + case AccelerationStructureBuildTypeKHR::eHostOrDevice: return "HostOrDevice"; default: return "invalid"; } } @@ -3257,8 +4553,8 @@ namespace VULKAN_HPP_NAMESPACE enum class AccelerationStructureMemoryRequirementsTypeKHR { - eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR, - eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR, + eObject = 
VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR, + eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR, eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR }; using AccelerationStructureMemoryRequirementsTypeNV = AccelerationStructureMemoryRequirementsTypeKHR; @@ -3267,16 +4563,16 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case AccelerationStructureMemoryRequirementsTypeKHR::eObject : return "Object"; - case AccelerationStructureMemoryRequirementsTypeKHR::eBuildScratch : return "BuildScratch"; - case AccelerationStructureMemoryRequirementsTypeKHR::eUpdateScratch : return "UpdateScratch"; + case AccelerationStructureMemoryRequirementsTypeKHR::eObject: return "Object"; + case AccelerationStructureMemoryRequirementsTypeKHR::eBuildScratch: return "BuildScratch"; + case AccelerationStructureMemoryRequirementsTypeKHR::eUpdateScratch: return "UpdateScratch"; default: return "invalid"; } } enum class AccelerationStructureTypeKHR { - eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR }; using AccelerationStructureTypeNV = AccelerationStructureTypeKHR; @@ -3285,84 +4581,85 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case AccelerationStructureTypeKHR::eTopLevel : return "TopLevel"; - case AccelerationStructureTypeKHR::eBottomLevel : return "BottomLevel"; + case AccelerationStructureTypeKHR::eTopLevel: return "TopLevel"; + case AccelerationStructureTypeKHR::eBottomLevel: return "BottomLevel"; default: return "invalid"; } } enum class AccessFlagBits : VkAccessFlags { - eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, - eIndexRead = VK_ACCESS_INDEX_READ_BIT, - eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, - eUniformRead = VK_ACCESS_UNIFORM_READ_BIT, - eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, - eShaderRead = VK_ACCESS_SHADER_READ_BIT, - eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT, - eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, - eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, - eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, - eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, - eTransferRead = VK_ACCESS_TRANSFER_READ_BIT, - eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT, - eHostRead = VK_ACCESS_HOST_READ_BIT, - eHostWrite = VK_ACCESS_HOST_WRITE_BIT, - eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, - eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, - eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, - eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, - eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, - eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, + eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, + eIndexRead = VK_ACCESS_INDEX_READ_BIT, + eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, + eUniformRead = VK_ACCESS_UNIFORM_READ_BIT, + eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, + eShaderRead = VK_ACCESS_SHADER_READ_BIT, + eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT, + eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, + eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, + eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + eDepthStencilAttachmentWrite = 
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + eTransferRead = VK_ACCESS_TRANSFER_READ_BIT, + eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT, + eHostRead = VK_ACCESS_HOST_READ_BIT, + eHostWrite = VK_ACCESS_HOST_WRITE_BIT, + eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, + eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, + eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, - eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, - eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, - eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, - eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, - eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, - eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, - eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, - eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV + eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, + eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, + eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, + eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, + eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV }; VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value ) { switch ( value ) { - case AccessFlagBits::eIndirectCommandRead : return "IndirectCommandRead"; - case AccessFlagBits::eIndexRead : return "IndexRead"; - case AccessFlagBits::eVertexAttributeRead : return "VertexAttributeRead"; - case AccessFlagBits::eUniformRead : return "UniformRead"; - case AccessFlagBits::eInputAttachmentRead : return "InputAttachmentRead"; - case AccessFlagBits::eShaderRead : return "ShaderRead"; - case AccessFlagBits::eShaderWrite : return "ShaderWrite"; - case AccessFlagBits::eColorAttachmentRead : return "ColorAttachmentRead"; - case AccessFlagBits::eColorAttachmentWrite : return "ColorAttachmentWrite"; - case AccessFlagBits::eDepthStencilAttachmentRead : return "DepthStencilAttachmentRead"; - case AccessFlagBits::eDepthStencilAttachmentWrite : return "DepthStencilAttachmentWrite"; - case AccessFlagBits::eTransferRead : return "TransferRead"; - case AccessFlagBits::eTransferWrite : return "TransferWrite"; - case AccessFlagBits::eHostRead : return "HostRead"; - case AccessFlagBits::eHostWrite : return "HostWrite"; - case AccessFlagBits::eMemoryRead : return "MemoryRead"; - case AccessFlagBits::eMemoryWrite : return "MemoryWrite"; - case AccessFlagBits::eTransformFeedbackWriteEXT : return "TransformFeedbackWriteEXT"; - case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT"; - case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT"; - case AccessFlagBits::eConditionalRenderingReadEXT : return 
"ConditionalRenderingReadEXT"; - case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT"; - case AccessFlagBits::eAccelerationStructureReadKHR : return "AccelerationStructureReadKHR"; - case AccessFlagBits::eAccelerationStructureWriteKHR : return "AccelerationStructureWriteKHR"; - case AccessFlagBits::eShadingRateImageReadNV : return "ShadingRateImageReadNV"; - case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT"; - case AccessFlagBits::eCommandPreprocessReadNV : return "CommandPreprocessReadNV"; - case AccessFlagBits::eCommandPreprocessWriteNV : return "CommandPreprocessWriteNV"; + case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead"; + case AccessFlagBits::eIndexRead: return "IndexRead"; + case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead"; + case AccessFlagBits::eUniformRead: return "UniformRead"; + case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead"; + case AccessFlagBits::eShaderRead: return "ShaderRead"; + case AccessFlagBits::eShaderWrite: return "ShaderWrite"; + case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead"; + case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite"; + case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead"; + case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite"; + case AccessFlagBits::eTransferRead: return "TransferRead"; + case AccessFlagBits::eTransferWrite: return "TransferWrite"; + case AccessFlagBits::eHostRead: return "HostRead"; + case AccessFlagBits::eHostWrite: return "HostWrite"; + case AccessFlagBits::eMemoryRead: return "MemoryRead"; + case AccessFlagBits::eMemoryWrite: return "MemoryWrite"; + case AccessFlagBits::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT"; + case AccessFlagBits::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT"; + case AccessFlagBits::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT"; + case AccessFlagBits::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT"; + case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; + case AccessFlagBits::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR"; + case AccessFlagBits::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR"; + case AccessFlagBits::eShadingRateImageReadNV: return "ShadingRateImageReadNV"; + case AccessFlagBits::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT"; + case AccessFlagBits::eCommandPreprocessReadNV: return "CommandPreprocessReadNV"; + case AccessFlagBits::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV"; default: return "invalid"; } } enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR ) { @@ -3378,15 +4675,15 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case AttachmentDescriptionFlagBits::eMayAlias : return "MayAlias"; + case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias"; default: return "invalid"; } } enum class AttachmentLoadOp { - eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, - eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, + eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, + eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE }; @@ -3394,16 +4691,16 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) 
{ - case AttachmentLoadOp::eLoad : return "Load"; - case AttachmentLoadOp::eClear : return "Clear"; - case AttachmentLoadOp::eDontCare : return "DontCare"; + case AttachmentLoadOp::eLoad: return "Load"; + case AttachmentLoadOp::eClear: return "Clear"; + case AttachmentLoadOp::eDontCare: return "DontCare"; default: return "invalid"; } } enum class AttachmentStoreOp { - eStore = VK_ATTACHMENT_STORE_OP_STORE, + eStore = VK_ATTACHMENT_STORE_OP_STORE, eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE, eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM }; @@ -3412,173 +4709,173 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case AttachmentStoreOp::eStore : return "Store"; - case AttachmentStoreOp::eDontCare : return "DontCare"; - case AttachmentStoreOp::eNoneQCOM : return "NoneQCOM"; + case AttachmentStoreOp::eStore: return "Store"; + case AttachmentStoreOp::eDontCare: return "DontCare"; + case AttachmentStoreOp::eNoneQCOM: return "NoneQCOM"; default: return "invalid"; } } enum class BlendFactor { - eZero = VK_BLEND_FACTOR_ZERO, - eOne = VK_BLEND_FACTOR_ONE, - eSrcColor = VK_BLEND_FACTOR_SRC_COLOR, - eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, - eDstColor = VK_BLEND_FACTOR_DST_COLOR, - eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, - eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA, - eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, - eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA, - eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, - eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR, + eZero = VK_BLEND_FACTOR_ZERO, + eOne = VK_BLEND_FACTOR_ONE, + eSrcColor = VK_BLEND_FACTOR_SRC_COLOR, + eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, + eDstColor = VK_BLEND_FACTOR_DST_COLOR, + eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, + eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA, + eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, + eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA, + eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, + eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR, eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, - eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA, + eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA, eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA, - eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, - eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR, - eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, - eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA, - eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA + eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, + eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR, + eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, + eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA, + eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA }; VULKAN_HPP_INLINE std::string to_string( BlendFactor value ) { switch ( value ) { - case BlendFactor::eZero : return "Zero"; - case BlendFactor::eOne : return "One"; - case BlendFactor::eSrcColor : return "SrcColor"; - case BlendFactor::eOneMinusSrcColor : return "OneMinusSrcColor"; - case BlendFactor::eDstColor : return "DstColor"; - case BlendFactor::eOneMinusDstColor : return "OneMinusDstColor"; - case BlendFactor::eSrcAlpha : return "SrcAlpha"; - case BlendFactor::eOneMinusSrcAlpha : return "OneMinusSrcAlpha"; - case BlendFactor::eDstAlpha : return "DstAlpha"; - case BlendFactor::eOneMinusDstAlpha : return "OneMinusDstAlpha"; - case BlendFactor::eConstantColor : return "ConstantColor"; - case BlendFactor::eOneMinusConstantColor : return 
"OneMinusConstantColor"; - case BlendFactor::eConstantAlpha : return "ConstantAlpha"; - case BlendFactor::eOneMinusConstantAlpha : return "OneMinusConstantAlpha"; - case BlendFactor::eSrcAlphaSaturate : return "SrcAlphaSaturate"; - case BlendFactor::eSrc1Color : return "Src1Color"; - case BlendFactor::eOneMinusSrc1Color : return "OneMinusSrc1Color"; - case BlendFactor::eSrc1Alpha : return "Src1Alpha"; - case BlendFactor::eOneMinusSrc1Alpha : return "OneMinusSrc1Alpha"; + case BlendFactor::eZero: return "Zero"; + case BlendFactor::eOne: return "One"; + case BlendFactor::eSrcColor: return "SrcColor"; + case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor"; + case BlendFactor::eDstColor: return "DstColor"; + case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor"; + case BlendFactor::eSrcAlpha: return "SrcAlpha"; + case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha"; + case BlendFactor::eDstAlpha: return "DstAlpha"; + case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha"; + case BlendFactor::eConstantColor: return "ConstantColor"; + case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor"; + case BlendFactor::eConstantAlpha: return "ConstantAlpha"; + case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha"; + case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate"; + case BlendFactor::eSrc1Color: return "Src1Color"; + case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color"; + case BlendFactor::eSrc1Alpha: return "Src1Alpha"; + case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha"; default: return "invalid"; } } enum class BlendOp { - eAdd = VK_BLEND_OP_ADD, - eSubtract = VK_BLEND_OP_SUBTRACT, - eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT, - eMin = VK_BLEND_OP_MIN, - eMax = VK_BLEND_OP_MAX, - eZeroEXT = VK_BLEND_OP_ZERO_EXT, - eSrcEXT = VK_BLEND_OP_SRC_EXT, - eDstEXT = VK_BLEND_OP_DST_EXT, - eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT, - eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT, - eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT, - eDstInEXT = VK_BLEND_OP_DST_IN_EXT, - eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT, - eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT, - eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT, - eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT, - eXorEXT = VK_BLEND_OP_XOR_EXT, - eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT, - eScreenEXT = VK_BLEND_OP_SCREEN_EXT, - eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT, - eDarkenEXT = VK_BLEND_OP_DARKEN_EXT, - eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT, - eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT, - eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT, - eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT, - eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT, - eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT, - eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT, - eInvertEXT = VK_BLEND_OP_INVERT_EXT, - eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT, - eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT, - eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT, - eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT, - eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT, - ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT, - eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT, - eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT, - eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT, - eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT, - eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT, - ePlusEXT = VK_BLEND_OP_PLUS_EXT, - ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT, + eAdd = VK_BLEND_OP_ADD, + eSubtract = VK_BLEND_OP_SUBTRACT, + eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT, + eMin = VK_BLEND_OP_MIN, + eMax = VK_BLEND_OP_MAX, + 
eZeroEXT = VK_BLEND_OP_ZERO_EXT, + eSrcEXT = VK_BLEND_OP_SRC_EXT, + eDstEXT = VK_BLEND_OP_DST_EXT, + eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT, + eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT, + eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT, + eDstInEXT = VK_BLEND_OP_DST_IN_EXT, + eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT, + eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT, + eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT, + eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT, + eXorEXT = VK_BLEND_OP_XOR_EXT, + eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT, + eScreenEXT = VK_BLEND_OP_SCREEN_EXT, + eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT, + eDarkenEXT = VK_BLEND_OP_DARKEN_EXT, + eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT, + eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT, + eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT, + eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT, + eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT, + eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT, + eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT, + eInvertEXT = VK_BLEND_OP_INVERT_EXT, + eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT, + eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT, + eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT, + eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT, + eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT, + ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT, + eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT, + eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT, + eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT, + eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT, + eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT, + ePlusEXT = VK_BLEND_OP_PLUS_EXT, + ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT, ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, - ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT, - eMinusEXT = VK_BLEND_OP_MINUS_EXT, - eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT, - eContrastEXT = VK_BLEND_OP_CONTRAST_EXT, - eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT, - eRedEXT = VK_BLEND_OP_RED_EXT, - eGreenEXT = VK_BLEND_OP_GREEN_EXT, - eBlueEXT = VK_BLEND_OP_BLUE_EXT + ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT, + eMinusEXT = VK_BLEND_OP_MINUS_EXT, + eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT, + eContrastEXT = VK_BLEND_OP_CONTRAST_EXT, + eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT, + eRedEXT = VK_BLEND_OP_RED_EXT, + eGreenEXT = VK_BLEND_OP_GREEN_EXT, + eBlueEXT = VK_BLEND_OP_BLUE_EXT }; VULKAN_HPP_INLINE std::string to_string( BlendOp value ) { switch ( value ) { - case BlendOp::eAdd : return "Add"; - case BlendOp::eSubtract : return "Subtract"; - case BlendOp::eReverseSubtract : return "ReverseSubtract"; - case BlendOp::eMin : return "Min"; - case BlendOp::eMax : return "Max"; - case BlendOp::eZeroEXT : return "ZeroEXT"; - case BlendOp::eSrcEXT : return "SrcEXT"; - case BlendOp::eDstEXT : return "DstEXT"; - case BlendOp::eSrcOverEXT : return "SrcOverEXT"; - case BlendOp::eDstOverEXT : return "DstOverEXT"; - case BlendOp::eSrcInEXT : return "SrcInEXT"; - case BlendOp::eDstInEXT : return "DstInEXT"; - case BlendOp::eSrcOutEXT : return "SrcOutEXT"; - case BlendOp::eDstOutEXT : return "DstOutEXT"; - case BlendOp::eSrcAtopEXT : return "SrcAtopEXT"; - case BlendOp::eDstAtopEXT : return "DstAtopEXT"; - case BlendOp::eXorEXT : return "XorEXT"; - case BlendOp::eMultiplyEXT : return "MultiplyEXT"; - case BlendOp::eScreenEXT : return "ScreenEXT"; - case BlendOp::eOverlayEXT : return "OverlayEXT"; - case BlendOp::eDarkenEXT : return "DarkenEXT"; - case BlendOp::eLightenEXT : return "LightenEXT"; - case BlendOp::eColordodgeEXT : return "ColordodgeEXT"; - case BlendOp::eColorburnEXT : return "ColorburnEXT"; - case BlendOp::eHardlightEXT 
: return "HardlightEXT"; - case BlendOp::eSoftlightEXT : return "SoftlightEXT"; - case BlendOp::eDifferenceEXT : return "DifferenceEXT"; - case BlendOp::eExclusionEXT : return "ExclusionEXT"; - case BlendOp::eInvertEXT : return "InvertEXT"; - case BlendOp::eInvertRgbEXT : return "InvertRgbEXT"; - case BlendOp::eLineardodgeEXT : return "LineardodgeEXT"; - case BlendOp::eLinearburnEXT : return "LinearburnEXT"; - case BlendOp::eVividlightEXT : return "VividlightEXT"; - case BlendOp::eLinearlightEXT : return "LinearlightEXT"; - case BlendOp::ePinlightEXT : return "PinlightEXT"; - case BlendOp::eHardmixEXT : return "HardmixEXT"; - case BlendOp::eHslHueEXT : return "HslHueEXT"; - case BlendOp::eHslSaturationEXT : return "HslSaturationEXT"; - case BlendOp::eHslColorEXT : return "HslColorEXT"; - case BlendOp::eHslLuminosityEXT : return "HslLuminosityEXT"; - case BlendOp::ePlusEXT : return "PlusEXT"; - case BlendOp::ePlusClampedEXT : return "PlusClampedEXT"; - case BlendOp::ePlusClampedAlphaEXT : return "PlusClampedAlphaEXT"; - case BlendOp::ePlusDarkerEXT : return "PlusDarkerEXT"; - case BlendOp::eMinusEXT : return "MinusEXT"; - case BlendOp::eMinusClampedEXT : return "MinusClampedEXT"; - case BlendOp::eContrastEXT : return "ContrastEXT"; - case BlendOp::eInvertOvgEXT : return "InvertOvgEXT"; - case BlendOp::eRedEXT : return "RedEXT"; - case BlendOp::eGreenEXT : return "GreenEXT"; - case BlendOp::eBlueEXT : return "BlueEXT"; + case BlendOp::eAdd: return "Add"; + case BlendOp::eSubtract: return "Subtract"; + case BlendOp::eReverseSubtract: return "ReverseSubtract"; + case BlendOp::eMin: return "Min"; + case BlendOp::eMax: return "Max"; + case BlendOp::eZeroEXT: return "ZeroEXT"; + case BlendOp::eSrcEXT: return "SrcEXT"; + case BlendOp::eDstEXT: return "DstEXT"; + case BlendOp::eSrcOverEXT: return "SrcOverEXT"; + case BlendOp::eDstOverEXT: return "DstOverEXT"; + case BlendOp::eSrcInEXT: return "SrcInEXT"; + case BlendOp::eDstInEXT: return "DstInEXT"; + case BlendOp::eSrcOutEXT: return "SrcOutEXT"; + case BlendOp::eDstOutEXT: return "DstOutEXT"; + case BlendOp::eSrcAtopEXT: return "SrcAtopEXT"; + case BlendOp::eDstAtopEXT: return "DstAtopEXT"; + case BlendOp::eXorEXT: return "XorEXT"; + case BlendOp::eMultiplyEXT: return "MultiplyEXT"; + case BlendOp::eScreenEXT: return "ScreenEXT"; + case BlendOp::eOverlayEXT: return "OverlayEXT"; + case BlendOp::eDarkenEXT: return "DarkenEXT"; + case BlendOp::eLightenEXT: return "LightenEXT"; + case BlendOp::eColordodgeEXT: return "ColordodgeEXT"; + case BlendOp::eColorburnEXT: return "ColorburnEXT"; + case BlendOp::eHardlightEXT: return "HardlightEXT"; + case BlendOp::eSoftlightEXT: return "SoftlightEXT"; + case BlendOp::eDifferenceEXT: return "DifferenceEXT"; + case BlendOp::eExclusionEXT: return "ExclusionEXT"; + case BlendOp::eInvertEXT: return "InvertEXT"; + case BlendOp::eInvertRgbEXT: return "InvertRgbEXT"; + case BlendOp::eLineardodgeEXT: return "LineardodgeEXT"; + case BlendOp::eLinearburnEXT: return "LinearburnEXT"; + case BlendOp::eVividlightEXT: return "VividlightEXT"; + case BlendOp::eLinearlightEXT: return "LinearlightEXT"; + case BlendOp::ePinlightEXT: return "PinlightEXT"; + case BlendOp::eHardmixEXT: return "HardmixEXT"; + case BlendOp::eHslHueEXT: return "HslHueEXT"; + case BlendOp::eHslSaturationEXT: return "HslSaturationEXT"; + case BlendOp::eHslColorEXT: return "HslColorEXT"; + case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT"; + case BlendOp::ePlusEXT: return "PlusEXT"; + case BlendOp::ePlusClampedEXT: return "PlusClampedEXT"; + case 
BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT"; + case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT"; + case BlendOp::eMinusEXT: return "MinusEXT"; + case BlendOp::eMinusClampedEXT: return "MinusClampedEXT"; + case BlendOp::eContrastEXT: return "ContrastEXT"; + case BlendOp::eInvertOvgEXT: return "InvertOvgEXT"; + case BlendOp::eRedEXT: return "RedEXT"; + case BlendOp::eGreenEXT: return "GreenEXT"; + case BlendOp::eBlueEXT: return "BlueEXT"; default: return "invalid"; } } @@ -3586,17 +4883,17 @@ namespace VULKAN_HPP_NAMESPACE enum class BlendOverlapEXT { eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT, - eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT, - eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT + eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT, + eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT }; VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value ) { switch ( value ) { - case BlendOverlapEXT::eUncorrelated : return "Uncorrelated"; - case BlendOverlapEXT::eDisjoint : return "Disjoint"; - case BlendOverlapEXT::eConjoint : return "Conjoint"; + case BlendOverlapEXT::eUncorrelated: return "Uncorrelated"; + case BlendOverlapEXT::eDisjoint: return "Disjoint"; + case BlendOverlapEXT::eConjoint: return "Conjoint"; default: return "invalid"; } } @@ -3604,34 +4901,34 @@ namespace VULKAN_HPP_NAMESPACE enum class BorderColor { eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, - eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, - eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, - eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK, - eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, - eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE + eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, + eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, + eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK, + eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, + eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE }; VULKAN_HPP_INLINE std::string to_string( BorderColor value ) { switch ( value ) { - case BorderColor::eFloatTransparentBlack : return "FloatTransparentBlack"; - case BorderColor::eIntTransparentBlack : return "IntTransparentBlack"; - case BorderColor::eFloatOpaqueBlack : return "FloatOpaqueBlack"; - case BorderColor::eIntOpaqueBlack : return "IntOpaqueBlack"; - case BorderColor::eFloatOpaqueWhite : return "FloatOpaqueWhite"; - case BorderColor::eIntOpaqueWhite : return "IntOpaqueWhite"; + case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack"; + case BorderColor::eIntTransparentBlack: return "IntTransparentBlack"; + case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack"; + case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack"; + case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite"; + case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite"; default: return "invalid"; } } enum class BufferCreateFlagBits : VkBufferCreateFlags { - eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, - eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, - eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, - eProtected = VK_BUFFER_CREATE_PROTECTED_BIT, - eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, + eProtected = VK_BUFFER_CREATE_PROTECTED_BIT, + eDeviceAddressCaptureReplay = 
VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT, eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR }; @@ -3640,60 +4937,61 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case BufferCreateFlagBits::eSparseBinding : return "SparseBinding"; - case BufferCreateFlagBits::eSparseResidency : return "SparseResidency"; - case BufferCreateFlagBits::eSparseAliased : return "SparseAliased"; - case BufferCreateFlagBits::eProtected : return "Protected"; - case BufferCreateFlagBits::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay"; + case BufferCreateFlagBits::eSparseBinding: return "SparseBinding"; + case BufferCreateFlagBits::eSparseResidency: return "SparseResidency"; + case BufferCreateFlagBits::eSparseAliased: return "SparseAliased"; + case BufferCreateFlagBits::eProtected: return "Protected"; + case BufferCreateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; default: return "invalid"; } } enum class BufferUsageFlagBits : VkBufferUsageFlags { - eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, - eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, - eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, - eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, - eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, - eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, - eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, - eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, - eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, - eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, - eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, + eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, + eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, + eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, + eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, + eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, + eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, + eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, + eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, - eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, - eRayTracingKHR = VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR, - eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, - eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, - eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR + eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, + eRayTracingKHR = VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR, + eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, + eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, + eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR }; VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value ) { switch ( value ) { - case BufferUsageFlagBits::eTransferSrc : return "TransferSrc"; - case BufferUsageFlagBits::eTransferDst : return "TransferDst"; - case BufferUsageFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer"; - case 
BufferUsageFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer"; - case BufferUsageFlagBits::eUniformBuffer : return "UniformBuffer"; - case BufferUsageFlagBits::eStorageBuffer : return "StorageBuffer"; - case BufferUsageFlagBits::eIndexBuffer : return "IndexBuffer"; - case BufferUsageFlagBits::eVertexBuffer : return "VertexBuffer"; - case BufferUsageFlagBits::eIndirectBuffer : return "IndirectBuffer"; - case BufferUsageFlagBits::eShaderDeviceAddress : return "ShaderDeviceAddress"; - case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT"; - case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT"; - case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT"; - case BufferUsageFlagBits::eRayTracingKHR : return "RayTracingKHR"; + case BufferUsageFlagBits::eTransferSrc: return "TransferSrc"; + case BufferUsageFlagBits::eTransferDst: return "TransferDst"; + case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer"; + case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer"; + case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer"; + case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer"; + case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer"; + case BufferUsageFlagBits::eShaderDeviceAddress: return "ShaderDeviceAddress"; + case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; + case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; + case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case BufferUsageFlagBits::eRayTracingKHR: return "RayTracingKHR"; default: return "invalid"; } } enum class BufferViewCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits ) { @@ -3702,11 +5000,11 @@ namespace VULKAN_HPP_NAMESPACE enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR { - eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, - eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR + eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR }; using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR; @@ -3714,11 +5012,11 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate : return "AllowUpdate"; - case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction : return "AllowCompaction"; - case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace : return "PreferFastTrace"; - case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild : return "PreferFastBuild"; - case BuildAccelerationStructureFlagBitsKHR::eLowMemory : return "LowMemory"; + case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate: return "AllowUpdate"; + case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction: return "AllowCompaction"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace: 
return "PreferFastTrace"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild: return "PreferFastBuild"; + case BuildAccelerationStructureFlagBitsKHR::eLowMemory: return "LowMemory"; default: return "invalid"; } } @@ -3726,7 +5024,7 @@ namespace VULKAN_HPP_NAMESPACE enum class ChromaLocation { eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN, - eMidpoint = VK_CHROMA_LOCATION_MIDPOINT + eMidpoint = VK_CHROMA_LOCATION_MIDPOINT }; using ChromaLocationKHR = ChromaLocation; @@ -3734,17 +5032,17 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ChromaLocation::eCositedEven : return "CositedEven"; - case ChromaLocation::eMidpoint : return "Midpoint"; + case ChromaLocation::eCositedEven: return "CositedEven"; + case ChromaLocation::eMidpoint: return "Midpoint"; default: return "invalid"; } } enum class CoarseSampleOrderTypeNV { - eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, - eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, - ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, + eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, + eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, + ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV }; @@ -3752,10 +5050,10 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case CoarseSampleOrderTypeNV::eDefault : return "Default"; - case CoarseSampleOrderTypeNV::eCustom : return "Custom"; - case CoarseSampleOrderTypeNV::ePixelMajor : return "PixelMajor"; - case CoarseSampleOrderTypeNV::eSampleMajor : return "SampleMajor"; + case CoarseSampleOrderTypeNV::eDefault: return "Default"; + case CoarseSampleOrderTypeNV::eCustom: return "Custom"; + case CoarseSampleOrderTypeNV::ePixelMajor: return "PixelMajor"; + case CoarseSampleOrderTypeNV::eSampleMajor: return "SampleMajor"; default: return "invalid"; } } @@ -3772,63 +5070,63 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ColorComponentFlagBits::eR : return "R"; - case ColorComponentFlagBits::eG : return "G"; - case ColorComponentFlagBits::eB : return "B"; - case ColorComponentFlagBits::eA : return "A"; + case ColorComponentFlagBits::eR: return "R"; + case ColorComponentFlagBits::eG: return "G"; + case ColorComponentFlagBits::eB: return "B"; + case ColorComponentFlagBits::eA: return "A"; default: return "invalid"; } } enum class ColorSpaceKHR { - eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT, - eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, - eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, - eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, - eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT, - eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT, - eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT, - eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT, - eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT, - eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, - eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, - eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, - ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, - eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, - eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD, + eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT, + eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, + eDisplayP3LinearEXT = 
VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, + eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, + eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT, + eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT, + eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT, + eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT, + eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT, + eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, + eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, + eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, + ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, + eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, + eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD, eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR, - eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT + eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT }; VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value ) { switch ( value ) { - case ColorSpaceKHR::eSrgbNonlinear : return "SrgbNonlinear"; - case ColorSpaceKHR::eDisplayP3NonlinearEXT : return "DisplayP3NonlinearEXT"; - case ColorSpaceKHR::eExtendedSrgbLinearEXT : return "ExtendedSrgbLinearEXT"; - case ColorSpaceKHR::eDisplayP3LinearEXT : return "DisplayP3LinearEXT"; - case ColorSpaceKHR::eDciP3NonlinearEXT : return "DciP3NonlinearEXT"; - case ColorSpaceKHR::eBt709LinearEXT : return "Bt709LinearEXT"; - case ColorSpaceKHR::eBt709NonlinearEXT : return "Bt709NonlinearEXT"; - case ColorSpaceKHR::eBt2020LinearEXT : return "Bt2020LinearEXT"; - case ColorSpaceKHR::eHdr10St2084EXT : return "Hdr10St2084EXT"; - case ColorSpaceKHR::eDolbyvisionEXT : return "DolbyvisionEXT"; - case ColorSpaceKHR::eHdr10HlgEXT : return "Hdr10HlgEXT"; - case ColorSpaceKHR::eAdobergbLinearEXT : return "AdobergbLinearEXT"; - case ColorSpaceKHR::eAdobergbNonlinearEXT : return "AdobergbNonlinearEXT"; - case ColorSpaceKHR::ePassThroughEXT : return "PassThroughEXT"; - case ColorSpaceKHR::eExtendedSrgbNonlinearEXT : return "ExtendedSrgbNonlinearEXT"; - case ColorSpaceKHR::eDisplayNativeAMD : return "DisplayNativeAMD"; + case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear"; + case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT"; + case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT"; + case ColorSpaceKHR::eDisplayP3LinearEXT: return "DisplayP3LinearEXT"; + case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT"; + case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT"; + case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT"; + case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT"; + case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT"; + case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT"; + case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT"; + case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT"; + case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT"; + case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT"; + case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT"; + case ColorSpaceKHR::eDisplayNativeAMD: return "DisplayNativeAMD"; default: return "invalid"; } } enum class CommandBufferLevel { - ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY, + ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY, eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY }; @@ -3836,8 +5134,8 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case CommandBufferLevel::ePrimary : return "Primary"; - case 
CommandBufferLevel::eSecondary : return "Secondary"; + case CommandBufferLevel::ePrimary: return "Primary"; + case CommandBufferLevel::eSecondary: return "Secondary"; default: return "invalid"; } } @@ -3851,43 +5149,43 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case CommandBufferResetFlagBits::eReleaseResources : return "ReleaseResources"; + case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources"; default: return "invalid"; } } enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags { - eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, + eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, - eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT + eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT }; VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value ) { switch ( value ) { - case CommandBufferUsageFlagBits::eOneTimeSubmit : return "OneTimeSubmit"; - case CommandBufferUsageFlagBits::eRenderPassContinue : return "RenderPassContinue"; - case CommandBufferUsageFlagBits::eSimultaneousUse : return "SimultaneousUse"; + case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit"; + case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue"; + case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse"; default: return "invalid"; } } enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags { - eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, + eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, - eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT + eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT }; VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value ) { switch ( value ) { - case CommandPoolCreateFlagBits::eTransient : return "Transient"; - case CommandPoolCreateFlagBits::eResetCommandBuffer : return "ResetCommandBuffer"; - case CommandPoolCreateFlagBits::eProtected : return "Protected"; + case CommandPoolCreateFlagBits::eTransient: return "Transient"; + case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer"; + case CommandPoolCreateFlagBits::eProtected: return "Protected"; default: return "invalid"; } } @@ -3901,35 +5199,35 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case CommandPoolResetFlagBits::eReleaseResources : return "ReleaseResources"; + case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources"; default: return "invalid"; } } enum class CompareOp { - eNever = VK_COMPARE_OP_NEVER, - eLess = VK_COMPARE_OP_LESS, - eEqual = VK_COMPARE_OP_EQUAL, - eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL, - eGreater = VK_COMPARE_OP_GREATER, - eNotEqual = VK_COMPARE_OP_NOT_EQUAL, + eNever = VK_COMPARE_OP_NEVER, + eLess = VK_COMPARE_OP_LESS, + eEqual = VK_COMPARE_OP_EQUAL, + eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL, + eGreater = VK_COMPARE_OP_GREATER, + eNotEqual = VK_COMPARE_OP_NOT_EQUAL, eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL, - eAlways = VK_COMPARE_OP_ALWAYS + eAlways = VK_COMPARE_OP_ALWAYS }; VULKAN_HPP_INLINE std::string to_string( CompareOp value ) { switch ( value ) { - case CompareOp::eNever : return "Never"; - case CompareOp::eLess : return "Less"; - case CompareOp::eEqual : return "Equal"; - case CompareOp::eLessOrEqual : return "LessOrEqual"; - case CompareOp::eGreater : return "Greater"; - case 
CompareOp::eNotEqual : return "NotEqual"; - case CompareOp::eGreaterOrEqual : return "GreaterOrEqual"; - case CompareOp::eAlways : return "Always"; + case CompareOp::eNever: return "Never"; + case CompareOp::eLess: return "Less"; + case CompareOp::eEqual: return "Equal"; + case CompareOp::eLessOrEqual: return "LessOrEqual"; + case CompareOp::eGreater: return "Greater"; + case CompareOp::eNotEqual: return "NotEqual"; + case CompareOp::eGreaterOrEqual: return "GreaterOrEqual"; + case CompareOp::eAlways: return "Always"; default: return "invalid"; } } @@ -3937,25 +5235,25 @@ namespace VULKAN_HPP_NAMESPACE enum class ComponentSwizzle { eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY, - eZero = VK_COMPONENT_SWIZZLE_ZERO, - eOne = VK_COMPONENT_SWIZZLE_ONE, - eR = VK_COMPONENT_SWIZZLE_R, - eG = VK_COMPONENT_SWIZZLE_G, - eB = VK_COMPONENT_SWIZZLE_B, - eA = VK_COMPONENT_SWIZZLE_A + eZero = VK_COMPONENT_SWIZZLE_ZERO, + eOne = VK_COMPONENT_SWIZZLE_ONE, + eR = VK_COMPONENT_SWIZZLE_R, + eG = VK_COMPONENT_SWIZZLE_G, + eB = VK_COMPONENT_SWIZZLE_B, + eA = VK_COMPONENT_SWIZZLE_A }; VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value ) { switch ( value ) { - case ComponentSwizzle::eIdentity : return "Identity"; - case ComponentSwizzle::eZero : return "Zero"; - case ComponentSwizzle::eOne : return "One"; - case ComponentSwizzle::eR : return "R"; - case ComponentSwizzle::eG : return "G"; - case ComponentSwizzle::eB : return "B"; - case ComponentSwizzle::eA : return "A"; + case ComponentSwizzle::eIdentity: return "Identity"; + case ComponentSwizzle::eZero: return "Zero"; + case ComponentSwizzle::eOne: return "One"; + case ComponentSwizzle::eR: return "R"; + case ComponentSwizzle::eG: return "G"; + case ComponentSwizzle::eB: return "B"; + case ComponentSwizzle::eA: return "A"; default: return "invalid"; } } @@ -3965,51 +5263,51 @@ namespace VULKAN_HPP_NAMESPACE eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV, eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV, eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV, - eSint8 = VK_COMPONENT_TYPE_SINT8_NV, - eSint16 = VK_COMPONENT_TYPE_SINT16_NV, - eSint32 = VK_COMPONENT_TYPE_SINT32_NV, - eSint64 = VK_COMPONENT_TYPE_SINT64_NV, - eUint8 = VK_COMPONENT_TYPE_UINT8_NV, - eUint16 = VK_COMPONENT_TYPE_UINT16_NV, - eUint32 = VK_COMPONENT_TYPE_UINT32_NV, - eUint64 = VK_COMPONENT_TYPE_UINT64_NV + eSint8 = VK_COMPONENT_TYPE_SINT8_NV, + eSint16 = VK_COMPONENT_TYPE_SINT16_NV, + eSint32 = VK_COMPONENT_TYPE_SINT32_NV, + eSint64 = VK_COMPONENT_TYPE_SINT64_NV, + eUint8 = VK_COMPONENT_TYPE_UINT8_NV, + eUint16 = VK_COMPONENT_TYPE_UINT16_NV, + eUint32 = VK_COMPONENT_TYPE_UINT32_NV, + eUint64 = VK_COMPONENT_TYPE_UINT64_NV }; VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value ) { switch ( value ) { - case ComponentTypeNV::eFloat16 : return "Float16"; - case ComponentTypeNV::eFloat32 : return "Float32"; - case ComponentTypeNV::eFloat64 : return "Float64"; - case ComponentTypeNV::eSint8 : return "Sint8"; - case ComponentTypeNV::eSint16 : return "Sint16"; - case ComponentTypeNV::eSint32 : return "Sint32"; - case ComponentTypeNV::eSint64 : return "Sint64"; - case ComponentTypeNV::eUint8 : return "Uint8"; - case ComponentTypeNV::eUint16 : return "Uint16"; - case ComponentTypeNV::eUint32 : return "Uint32"; - case ComponentTypeNV::eUint64 : return "Uint64"; + case ComponentTypeNV::eFloat16: return "Float16"; + case ComponentTypeNV::eFloat32: return "Float32"; + case ComponentTypeNV::eFloat64: return "Float64"; + case ComponentTypeNV::eSint8: return "Sint8"; + case ComponentTypeNV::eSint16: return "Sint16"; + 
case ComponentTypeNV::eSint32: return "Sint32"; + case ComponentTypeNV::eSint64: return "Sint64"; + case ComponentTypeNV::eUint8: return "Uint8"; + case ComponentTypeNV::eUint16: return "Uint16"; + case ComponentTypeNV::eUint32: return "Uint32"; + case ComponentTypeNV::eUint64: return "Uint64"; default: return "invalid"; } } enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR { - eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, - ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, + eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, + ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR, - eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR + eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR }; VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value ) { switch ( value ) { - case CompositeAlphaFlagBitsKHR::eOpaque : return "Opaque"; - case CompositeAlphaFlagBitsKHR::ePreMultiplied : return "PreMultiplied"; - case CompositeAlphaFlagBitsKHR::ePostMultiplied : return "PostMultiplied"; - case CompositeAlphaFlagBitsKHR::eInherit : return "Inherit"; + case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied"; + case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied"; + case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit"; default: return "invalid"; } } @@ -4023,15 +5321,15 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ConditionalRenderingFlagBitsEXT::eInverted : return "Inverted"; + case ConditionalRenderingFlagBitsEXT::eInverted: return "Inverted"; default: return "invalid"; } } enum class ConservativeRasterizationModeEXT { - eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, - eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, + eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, + eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT }; @@ -4039,18 +5337,18 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ConservativeRasterizationModeEXT::eDisabled : return "Disabled"; - case ConservativeRasterizationModeEXT::eOverestimate : return "Overestimate"; - case ConservativeRasterizationModeEXT::eUnderestimate : return "Underestimate"; + case ConservativeRasterizationModeEXT::eDisabled: return "Disabled"; + case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate"; + case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate"; default: return "invalid"; } } enum class CopyAccelerationStructureModeKHR { - eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, - eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, - eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR, + eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR, eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR }; using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR; @@ -4059,37 +5357,37 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case CopyAccelerationStructureModeKHR::eClone : return "Clone"; - case CopyAccelerationStructureModeKHR::eCompact : return "Compact"; - case CopyAccelerationStructureModeKHR::eSerialize : return "Serialize"; - case 
CopyAccelerationStructureModeKHR::eDeserialize : return "Deserialize"; + case CopyAccelerationStructureModeKHR::eClone: return "Clone"; + case CopyAccelerationStructureModeKHR::eCompact: return "Compact"; + case CopyAccelerationStructureModeKHR::eSerialize: return "Serialize"; + case CopyAccelerationStructureModeKHR::eDeserialize: return "Deserialize"; default: return "invalid"; } } enum class CoverageModulationModeNV { - eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV, - eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV, + eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV, + eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV, eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV, - eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV + eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV }; VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value ) { switch ( value ) { - case CoverageModulationModeNV::eNone : return "None"; - case CoverageModulationModeNV::eRgb : return "Rgb"; - case CoverageModulationModeNV::eAlpha : return "Alpha"; - case CoverageModulationModeNV::eRgba : return "Rgba"; + case CoverageModulationModeNV::eNone: return "None"; + case CoverageModulationModeNV::eRgb: return "Rgb"; + case CoverageModulationModeNV::eAlpha: return "Alpha"; + case CoverageModulationModeNV::eRgba: return "Rgba"; default: return "invalid"; } } enum class CoverageReductionModeNV { - eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV, + eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV, eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV }; @@ -4097,17 +5395,17 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case CoverageReductionModeNV::eMerge : return "Merge"; - case CoverageReductionModeNV::eTruncate : return "Truncate"; + case CoverageReductionModeNV::eMerge: return "Merge"; + case CoverageReductionModeNV::eTruncate: return "Truncate"; default: return "invalid"; } } enum class CullModeFlagBits : VkCullModeFlags { - eNone = VK_CULL_MODE_NONE, - eFront = VK_CULL_MODE_FRONT_BIT, - eBack = VK_CULL_MODE_BACK_BIT, + eNone = VK_CULL_MODE_NONE, + eFront = VK_CULL_MODE_FRONT_BIT, + eBack = VK_CULL_MODE_BACK_BIT, eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK }; @@ -4115,119 +5413,119 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case CullModeFlagBits::eNone : return "None"; - case CullModeFlagBits::eFront : return "Front"; - case CullModeFlagBits::eBack : return "Back"; - case CullModeFlagBits::eFrontAndBack : return "FrontAndBack"; + case CullModeFlagBits::eNone: return "None"; + case CullModeFlagBits::eFront: return "Front"; + case CullModeFlagBits::eBack: return "Back"; + case CullModeFlagBits::eFrontAndBack: return "FrontAndBack"; default: return "invalid"; } } enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT { - eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT, - eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT, + eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT, + eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT, ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, - eError = VK_DEBUG_REPORT_ERROR_BIT_EXT, - eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT + eError = VK_DEBUG_REPORT_ERROR_BIT_EXT, + eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT }; VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value ) { switch ( value ) { - case DebugReportFlagBitsEXT::eInformation : return "Information"; - case DebugReportFlagBitsEXT::eWarning : return "Warning"; - case DebugReportFlagBitsEXT::ePerformanceWarning : return "PerformanceWarning"; - case DebugReportFlagBitsEXT::eError : return "Error"; - case 
DebugReportFlagBitsEXT::eDebug : return "Debug"; + case DebugReportFlagBitsEXT::eInformation: return "Information"; + case DebugReportFlagBitsEXT::eWarning: return "Warning"; + case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning"; + case DebugReportFlagBitsEXT::eError: return "Error"; + case DebugReportFlagBitsEXT::eDebug: return "Debug"; default: return "invalid"; } } enum class DebugReportObjectTypeEXT { - eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, - eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, - ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, - eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, - eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, - eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, - eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, - eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, - eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, - eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, - eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, - eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, - eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, - eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, - eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, - eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, - ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, - ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, - eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, - ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, - eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, - eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, - eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, - eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, - eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, - eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, - eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, - eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, - eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, - eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, - eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, - eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, - eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, - eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, - eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, - eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, - eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, + eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, + eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, + ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, + eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, + eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, + eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, + eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, + eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, + eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, + eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, + eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, + eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, + eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 
+ eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, + eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, + eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, + ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, + ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, + eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, + ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, + eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, + eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, + eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, + eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, + eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, + eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, + eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, + eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, + eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, + eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, + eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, + eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, + eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, - eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT, - eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT + eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT, + eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT }; VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value ) { switch ( value ) { - case DebugReportObjectTypeEXT::eUnknown : return "Unknown"; - case DebugReportObjectTypeEXT::eInstance : return "Instance"; - case DebugReportObjectTypeEXT::ePhysicalDevice : return "PhysicalDevice"; - case DebugReportObjectTypeEXT::eDevice : return "Device"; - case DebugReportObjectTypeEXT::eQueue : return "Queue"; - case DebugReportObjectTypeEXT::eSemaphore : return "Semaphore"; - case DebugReportObjectTypeEXT::eCommandBuffer : return "CommandBuffer"; - case DebugReportObjectTypeEXT::eFence : return "Fence"; - case DebugReportObjectTypeEXT::eDeviceMemory : return "DeviceMemory"; - case DebugReportObjectTypeEXT::eBuffer : return "Buffer"; - case DebugReportObjectTypeEXT::eImage : return "Image"; - case DebugReportObjectTypeEXT::eEvent : return "Event"; - case DebugReportObjectTypeEXT::eQueryPool : return "QueryPool"; - case DebugReportObjectTypeEXT::eBufferView : return "BufferView"; - case DebugReportObjectTypeEXT::eImageView : return "ImageView"; - case DebugReportObjectTypeEXT::eShaderModule : return "ShaderModule"; - case DebugReportObjectTypeEXT::ePipelineCache : return "PipelineCache"; - case DebugReportObjectTypeEXT::ePipelineLayout : return "PipelineLayout"; - case DebugReportObjectTypeEXT::eRenderPass : return "RenderPass"; - case DebugReportObjectTypeEXT::ePipeline : return "Pipeline"; - case 
DebugReportObjectTypeEXT::eDescriptorSetLayout : return "DescriptorSetLayout"; - case DebugReportObjectTypeEXT::eSampler : return "Sampler"; - case DebugReportObjectTypeEXT::eDescriptorPool : return "DescriptorPool"; - case DebugReportObjectTypeEXT::eDescriptorSet : return "DescriptorSet"; - case DebugReportObjectTypeEXT::eFramebuffer : return "Framebuffer"; - case DebugReportObjectTypeEXT::eCommandPool : return "CommandPool"; - case DebugReportObjectTypeEXT::eSurfaceKHR : return "SurfaceKHR"; - case DebugReportObjectTypeEXT::eSwapchainKHR : return "SwapchainKHR"; - case DebugReportObjectTypeEXT::eDebugReportCallbackEXT : return "DebugReportCallbackEXT"; - case DebugReportObjectTypeEXT::eDisplayKHR : return "DisplayKHR"; - case DebugReportObjectTypeEXT::eDisplayModeKHR : return "DisplayModeKHR"; - case DebugReportObjectTypeEXT::eValidationCacheEXT : return "ValidationCacheEXT"; - case DebugReportObjectTypeEXT::eSamplerYcbcrConversion : return "SamplerYcbcrConversion"; - case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate"; - case DebugReportObjectTypeEXT::eAccelerationStructureKHR : return "AccelerationStructureKHR"; + case DebugReportObjectTypeEXT::eUnknown: return "Unknown"; + case DebugReportObjectTypeEXT::eInstance: return "Instance"; + case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice"; + case DebugReportObjectTypeEXT::eDevice: return "Device"; + case DebugReportObjectTypeEXT::eQueue: return "Queue"; + case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore"; + case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer"; + case DebugReportObjectTypeEXT::eFence: return "Fence"; + case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory"; + case DebugReportObjectTypeEXT::eBuffer: return "Buffer"; + case DebugReportObjectTypeEXT::eImage: return "Image"; + case DebugReportObjectTypeEXT::eEvent: return "Event"; + case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool"; + case DebugReportObjectTypeEXT::eBufferView: return "BufferView"; + case DebugReportObjectTypeEXT::eImageView: return "ImageView"; + case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule"; + case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache"; + case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout"; + case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass"; + case DebugReportObjectTypeEXT::ePipeline: return "Pipeline"; + case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout"; + case DebugReportObjectTypeEXT::eSampler: return "Sampler"; + case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool"; + case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet"; + case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer"; + case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool"; + case DebugReportObjectTypeEXT::eSurfaceKHR: return "SurfaceKHR"; + case DebugReportObjectTypeEXT::eSwapchainKHR: return "SwapchainKHR"; + case DebugReportObjectTypeEXT::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; + case DebugReportObjectTypeEXT::eDisplayKHR: return "DisplayKHR"; + case DebugReportObjectTypeEXT::eDisplayModeKHR: return "DisplayModeKHR"; + case DebugReportObjectTypeEXT::eValidationCacheEXT: return "ValidationCacheEXT"; + case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case 
DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR"; default: return "invalid"; } } @@ -4235,27 +5533,27 @@ namespace VULKAN_HPP_NAMESPACE enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT { eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, - eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, + eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT, - eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT + eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT }; VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value ) { switch ( value ) { - case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose : return "Verbose"; - case DebugUtilsMessageSeverityFlagBitsEXT::eInfo : return "Info"; - case DebugUtilsMessageSeverityFlagBitsEXT::eWarning : return "Warning"; - case DebugUtilsMessageSeverityFlagBitsEXT::eError : return "Error"; + case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose"; + case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info"; + case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning"; + case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error"; default: return "invalid"; } } enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT { - eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, - eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, + eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, + eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT }; @@ -4263,19 +5561,19 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DebugUtilsMessageTypeFlagBitsEXT::eGeneral : return "General"; - case DebugUtilsMessageTypeFlagBitsEXT::eValidation : return "Validation"; - case DebugUtilsMessageTypeFlagBitsEXT::ePerformance : return "Performance"; + case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General"; + case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation"; + case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance"; default: return "invalid"; } } enum class DependencyFlagBits : VkDependencyFlags { - eByRegion = VK_DEPENDENCY_BY_REGION_BIT, - eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, - eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, - eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR, + eByRegion = VK_DEPENDENCY_BY_REGION_BIT, + eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, + eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, + eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR, eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR }; @@ -4283,19 +5581,19 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DependencyFlagBits::eByRegion : return "ByRegion"; - case DependencyFlagBits::eDeviceGroup : return "DeviceGroup"; - case DependencyFlagBits::eViewLocal : return "ViewLocal"; + case DependencyFlagBits::eByRegion: return "ByRegion"; + case DependencyFlagBits::eDeviceGroup: return "DeviceGroup"; + case DependencyFlagBits::eViewLocal: return "ViewLocal"; default: return "invalid"; } } enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags { - eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, + eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, - ePartiallyBound = 
VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, - eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT + ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, + eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT }; using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits; @@ -4303,18 +5601,18 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DescriptorBindingFlagBits::eUpdateAfterBind : return "UpdateAfterBind"; - case DescriptorBindingFlagBits::eUpdateUnusedWhilePending : return "UpdateUnusedWhilePending"; - case DescriptorBindingFlagBits::ePartiallyBound : return "PartiallyBound"; - case DescriptorBindingFlagBits::eVariableDescriptorCount : return "VariableDescriptorCount"; + case DescriptorBindingFlagBits::eUpdateAfterBind: return "UpdateAfterBind"; + case DescriptorBindingFlagBits::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending"; + case DescriptorBindingFlagBits::ePartiallyBound: return "PartiallyBound"; + case DescriptorBindingFlagBits::eVariableDescriptorCount: return "VariableDescriptorCount"; default: return "invalid"; } } enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags { - eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, - eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, + eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, + eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT }; @@ -4322,16 +5620,16 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DescriptorPoolCreateFlagBits::eFreeDescriptorSet : return "FreeDescriptorSet"; - case DescriptorPoolCreateFlagBits::eUpdateAfterBind : return "UpdateAfterBind"; + case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet"; + case DescriptorPoolCreateFlagBits::eUpdateAfterBind: return "UpdateAfterBind"; default: return "invalid"; } } enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags { - eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, - ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, + eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT }; @@ -4339,54 +5637,54 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool : return "UpdateAfterBindPool"; - case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR : return "PushDescriptorKHR"; + case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool: return "UpdateAfterBindPool"; + case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR"; default: return "invalid"; } } enum class DescriptorType { - eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, - eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, - eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, - eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, - eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, - eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, - eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, - eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, - eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, - 
eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, - eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, - eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, + eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, + eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, + eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, + eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, + eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, + eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, + eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, + eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, + eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, + eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, + eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, - eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV + eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV }; VULKAN_HPP_INLINE std::string to_string( DescriptorType value ) { switch ( value ) { - case DescriptorType::eSampler : return "Sampler"; - case DescriptorType::eCombinedImageSampler : return "CombinedImageSampler"; - case DescriptorType::eSampledImage : return "SampledImage"; - case DescriptorType::eStorageImage : return "StorageImage"; - case DescriptorType::eUniformTexelBuffer : return "UniformTexelBuffer"; - case DescriptorType::eStorageTexelBuffer : return "StorageTexelBuffer"; - case DescriptorType::eUniformBuffer : return "UniformBuffer"; - case DescriptorType::eStorageBuffer : return "StorageBuffer"; - case DescriptorType::eUniformBufferDynamic : return "UniformBufferDynamic"; - case DescriptorType::eStorageBufferDynamic : return "StorageBufferDynamic"; - case DescriptorType::eInputAttachment : return "InputAttachment"; - case DescriptorType::eInlineUniformBlockEXT : return "InlineUniformBlockEXT"; - case DescriptorType::eAccelerationStructureKHR : return "AccelerationStructureKHR"; + case DescriptorType::eSampler: return "Sampler"; + case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler"; + case DescriptorType::eSampledImage: return "SampledImage"; + case DescriptorType::eStorageImage: return "StorageImage"; + case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer"; + case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer"; + case DescriptorType::eUniformBuffer: return "UniformBuffer"; + case DescriptorType::eStorageBuffer: return "StorageBuffer"; + case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic"; + case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic"; + case DescriptorType::eInputAttachment: return "InputAttachment"; + case DescriptorType::eInlineUniformBlockEXT: return "InlineUniformBlockEXT"; + case DescriptorType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; default: return "invalid"; } } enum class DescriptorUpdateTemplateType { - eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR }; using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType; @@ -4395,14 +5693,15 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DescriptorUpdateTemplateType::eDescriptorSet : 
return "DescriptorSet"; - case DescriptorUpdateTemplateType::ePushDescriptorsKHR : return "PushDescriptorsKHR"; + case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet"; + case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR"; default: return "invalid"; } } enum class DeviceCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits ) { @@ -4411,8 +5710,8 @@ namespace VULKAN_HPP_NAMESPACE enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV { - eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV, - eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV, + eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV, + eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV, eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV }; @@ -4420,9 +5719,9 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo : return "EnableShaderDebugInfo"; - case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking : return "EnableResourceTracking"; - case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints : return "EnableAutomaticCheckpoints"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo: return "EnableShaderDebugInfo"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking: return "EnableResourceTracking"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints: return "EnableAutomaticCheckpoints"; default: return "invalid"; } } @@ -4436,16 +5735,16 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DeviceEventTypeEXT::eDisplayHotplug : return "DisplayHotplug"; + case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug"; default: return "invalid"; } } enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR { - eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, - eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, - eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, + eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, + eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, + eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR }; @@ -4453,10 +5752,10 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DeviceGroupPresentModeFlagBitsKHR::eLocal : return "Local"; - case DeviceGroupPresentModeFlagBitsKHR::eRemote : return "Remote"; - case DeviceGroupPresentModeFlagBitsKHR::eSum : return "Sum"; - case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice : return "LocalMultiDevice"; + case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local"; + case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote"; + case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum"; + case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice"; default: return "invalid"; } } @@ -4470,7 +5769,7 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DeviceQueueCreateFlagBits::eProtected : return "Protected"; + case DeviceQueueCreateFlagBits::eProtected: return "Protected"; default: return "invalid"; } } @@ -4485,8 +5784,8 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DiscardRectangleModeEXT::eInclusive : return "Inclusive"; - case 
DiscardRectangleModeEXT::eExclusive : return "Exclusive"; + case DiscardRectangleModeEXT::eInclusive: return "Inclusive"; + case DiscardRectangleModeEXT::eExclusive: return "Exclusive"; default: return "invalid"; } } @@ -4500,16 +5799,16 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DisplayEventTypeEXT::eFirstPixelOut : return "FirstPixelOut"; + case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut"; default: return "invalid"; } } enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR { - eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, - eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR, - ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR, + eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, + eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR, + ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR, ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR }; @@ -4517,47 +5816,47 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DisplayPlaneAlphaFlagBitsKHR::eOpaque : return "Opaque"; - case DisplayPlaneAlphaFlagBitsKHR::eGlobal : return "Global"; - case DisplayPlaneAlphaFlagBitsKHR::ePerPixel : return "PerPixel"; - case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied : return "PerPixelPremultiplied"; + case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied"; default: return "invalid"; } } enum class DisplayPowerStateEXT { - eOff = VK_DISPLAY_POWER_STATE_OFF_EXT, + eOff = VK_DISPLAY_POWER_STATE_OFF_EXT, eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT, - eOn = VK_DISPLAY_POWER_STATE_ON_EXT + eOn = VK_DISPLAY_POWER_STATE_ON_EXT }; VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value ) { switch ( value ) { - case DisplayPowerStateEXT::eOff : return "Off"; - case DisplayPowerStateEXT::eSuspend : return "Suspend"; - case DisplayPowerStateEXT::eOn : return "On"; + case DisplayPowerStateEXT::eOff: return "Off"; + case DisplayPowerStateEXT::eSuspend: return "Suspend"; + case DisplayPowerStateEXT::eOn: return "On"; default: return "invalid"; } } enum class DriverId { - eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY, - eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE, - eMesaRadv = VK_DRIVER_ID_MESA_RADV, - eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY, + eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY, + eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE, + eMesaRadv = VK_DRIVER_ID_MESA_RADV, + eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY, eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, - eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, - eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, - eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, - eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY, - eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, - eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY, - eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY, - eIntelOpenSourceMesa = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR + eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, + eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, + eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, + eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY, + eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, + eGgpProprietary = 
VK_DRIVER_ID_GGP_PROPRIETARY, + eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + eIntelOpenSourceMesa = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR }; using DriverIdKHR = DriverId; @@ -4565,62 +5864,62 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DriverId::eAmdProprietary : return "AmdProprietary"; - case DriverId::eAmdOpenSource : return "AmdOpenSource"; - case DriverId::eMesaRadv : return "MesaRadv"; - case DriverId::eNvidiaProprietary : return "NvidiaProprietary"; - case DriverId::eIntelProprietaryWindows : return "IntelProprietaryWindows"; - case DriverId::eIntelOpenSourceMESA : return "IntelOpenSourceMESA"; - case DriverId::eImaginationProprietary : return "ImaginationProprietary"; - case DriverId::eQualcommProprietary : return "QualcommProprietary"; - case DriverId::eArmProprietary : return "ArmProprietary"; - case DriverId::eGoogleSwiftshader : return "GoogleSwiftshader"; - case DriverId::eGgpProprietary : return "GgpProprietary"; - case DriverId::eBroadcomProprietary : return "BroadcomProprietary"; + case DriverId::eAmdProprietary: return "AmdProprietary"; + case DriverId::eAmdOpenSource: return "AmdOpenSource"; + case DriverId::eMesaRadv: return "MesaRadv"; + case DriverId::eNvidiaProprietary: return "NvidiaProprietary"; + case DriverId::eIntelProprietaryWindows: return "IntelProprietaryWindows"; + case DriverId::eIntelOpenSourceMESA: return "IntelOpenSourceMESA"; + case DriverId::eImaginationProprietary: return "ImaginationProprietary"; + case DriverId::eQualcommProprietary: return "QualcommProprietary"; + case DriverId::eArmProprietary: return "ArmProprietary"; + case DriverId::eGoogleSwiftshader: return "GoogleSwiftshader"; + case DriverId::eGgpProprietary: return "GgpProprietary"; + case DriverId::eBroadcomProprietary: return "BroadcomProprietary"; default: return "invalid"; } } enum class DynamicState { - eViewport = VK_DYNAMIC_STATE_VIEWPORT, - eScissor = VK_DYNAMIC_STATE_SCISSOR, - eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, - eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, - eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, - eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, - eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, - eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, - eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, - eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, - eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, - eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, + eViewport = VK_DYNAMIC_STATE_VIEWPORT, + eScissor = VK_DYNAMIC_STATE_SCISSOR, + eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, + eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, + eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, + eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, + eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, + eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, + eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, + eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, + eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, - eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, - eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, - eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT + eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, + eExclusiveScissorNV = 
VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, + eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT }; VULKAN_HPP_INLINE std::string to_string( DynamicState value ) { switch ( value ) { - case DynamicState::eViewport : return "Viewport"; - case DynamicState::eScissor : return "Scissor"; - case DynamicState::eLineWidth : return "LineWidth"; - case DynamicState::eDepthBias : return "DepthBias"; - case DynamicState::eBlendConstants : return "BlendConstants"; - case DynamicState::eDepthBounds : return "DepthBounds"; - case DynamicState::eStencilCompareMask : return "StencilCompareMask"; - case DynamicState::eStencilWriteMask : return "StencilWriteMask"; - case DynamicState::eStencilReference : return "StencilReference"; - case DynamicState::eViewportWScalingNV : return "ViewportWScalingNV"; - case DynamicState::eDiscardRectangleEXT : return "DiscardRectangleEXT"; - case DynamicState::eSampleLocationsEXT : return "SampleLocationsEXT"; - case DynamicState::eViewportShadingRatePaletteNV : return "ViewportShadingRatePaletteNV"; - case DynamicState::eViewportCoarseSampleOrderNV : return "ViewportCoarseSampleOrderNV"; - case DynamicState::eExclusiveScissorNV : return "ExclusiveScissorNV"; - case DynamicState::eLineStippleEXT : return "LineStippleEXT"; + case DynamicState::eViewport: return "Viewport"; + case DynamicState::eScissor: return "Scissor"; + case DynamicState::eLineWidth: return "LineWidth"; + case DynamicState::eDepthBias: return "DepthBias"; + case DynamicState::eBlendConstants: return "BlendConstants"; + case DynamicState::eDepthBounds: return "DepthBounds"; + case DynamicState::eStencilCompareMask: return "StencilCompareMask"; + case DynamicState::eStencilWriteMask: return "StencilWriteMask"; + case DynamicState::eStencilReference: return "StencilReference"; + case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV"; + case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT"; + case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT"; + case DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV"; + case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV"; + case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV"; + case DynamicState::eLineStippleEXT: return "LineStippleEXT"; default: return "invalid"; } } @@ -4636,18 +5935,18 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ExternalFenceFeatureFlagBits::eExportable : return "Exportable"; - case ExternalFenceFeatureFlagBits::eImportable : return "Importable"; + case ExternalFenceFeatureFlagBits::eExportable: return "Exportable"; + case ExternalFenceFeatureFlagBits::eImportable: return "Importable"; default: return "invalid"; } } enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags { - eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, - eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, - eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT + eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT }; using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits; @@ -4655,10 +5954,10 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ExternalFenceHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd"; - case ExternalFenceHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32"; - case 
ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt"; - case ExternalFenceHandleTypeFlagBits::eSyncFd : return "SyncFd"; + case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd"; default: return "invalid"; } } @@ -4666,8 +5965,8 @@ namespace VULKAN_HPP_NAMESPACE enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags { eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, - eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, - eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT }; using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits; @@ -4675,9 +5974,9 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ExternalMemoryFeatureFlagBits::eDedicatedOnly : return "DedicatedOnly"; - case ExternalMemoryFeatureFlagBits::eExportable : return "Exportable"; - case ExternalMemoryFeatureFlagBits::eImportable : return "Importable"; + case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBits::eImportable: return "Importable"; default: return "invalid"; } } @@ -4685,34 +5984,34 @@ namespace VULKAN_HPP_NAMESPACE enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV { eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV, - eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV, - eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV }; VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value ) { switch ( value ) { - case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly : return "DedicatedOnly"; - case ExternalMemoryFeatureFlagBitsNV::eExportable : return "Exportable"; - case ExternalMemoryFeatureFlagBitsNV::eImportable : return "Importable"; + case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable"; default: return "invalid"; } } enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags { - eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, - eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, - eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, - eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, - eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, - eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, - eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, - eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT, + eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + eD3D12Heap = 
VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT, eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, - eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, - eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT + eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, + eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT }; using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits; @@ -4720,37 +6019,37 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ExternalMemoryHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd"; - case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32"; - case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt"; - case ExternalMemoryHandleTypeFlagBits::eD3D11Texture : return "D3D11Texture"; - case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt : return "D3D11TextureKmt"; - case ExternalMemoryHandleTypeFlagBits::eD3D12Heap : return "D3D12Heap"; - case ExternalMemoryHandleTypeFlagBits::eD3D12Resource : return "D3D12Resource"; - case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT : return "DmaBufEXT"; - case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID : return "AndroidHardwareBufferANDROID"; - case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT : return "HostAllocationEXT"; - case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT : return "HostMappedForeignMemoryEXT"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture"; + case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource"; + case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT"; + case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID"; + case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT"; + case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT"; default: return "invalid"; } } enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV { - eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV, + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV, eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV, - eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV, - eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV + eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV, + eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV }; VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value ) { switch ( value ) { - case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 : return "OpaqueWin32"; - case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt : return "OpaqueWin32Kmt"; - case 
ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image : return "D3D11Image"; - case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt : return "D3D11ImageKmt"; + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt"; default: return "invalid"; } } @@ -4766,19 +6065,19 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ExternalSemaphoreFeatureFlagBits::eExportable : return "Exportable"; - case ExternalSemaphoreFeatureFlagBits::eImportable : return "Importable"; + case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable"; + case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable"; default: return "invalid"; } } enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags { - eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, - eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, - eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, - eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT + eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT }; using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits; @@ -4786,11 +6085,11 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd"; - case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32"; - case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt"; - case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence : return "D3D12Fence"; - case ExternalSemaphoreHandleTypeFlagBits::eSyncFd : return "SyncFd"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence"; + case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd"; default: return "invalid"; } } @@ -4804,7 +6103,7 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case FenceCreateFlagBits::eSignaled : return "Signaled"; + case FenceCreateFlagBits::eSignaled: return "Signaled"; default: return "invalid"; } } @@ -4819,15 +6118,15 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case FenceImportFlagBits::eTemporary : return "Temporary"; + case FenceImportFlagBits::eTemporary: return "Temporary"; default: return "invalid"; } } enum class Filter { - eNearest = VK_FILTER_NEAREST, - eLinear = VK_FILTER_LINEAR, + eNearest = VK_FILTER_NEAREST, + eLinear = VK_FILTER_LINEAR, eCubicIMG = VK_FILTER_CUBIC_IMG, eCubicEXT = VK_FILTER_CUBIC_EXT }; @@ -4836,579 +6135,586 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case Filter::eNearest : return "Nearest"; - case Filter::eLinear : return "Linear"; - case Filter::eCubicIMG : return "CubicIMG"; + case Filter::eNearest: return "Nearest"; + case Filter::eLinear: return "Linear"; + case Filter::eCubicIMG: 
return "CubicIMG"; default: return "invalid"; } } enum class Format { - eUndefined = VK_FORMAT_UNDEFINED, - eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8, - eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16, - eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16, - eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16, - eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16, - eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16, - eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16, - eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16, - eR8Unorm = VK_FORMAT_R8_UNORM, - eR8Snorm = VK_FORMAT_R8_SNORM, - eR8Uscaled = VK_FORMAT_R8_USCALED, - eR8Sscaled = VK_FORMAT_R8_SSCALED, - eR8Uint = VK_FORMAT_R8_UINT, - eR8Sint = VK_FORMAT_R8_SINT, - eR8Srgb = VK_FORMAT_R8_SRGB, - eR8G8Unorm = VK_FORMAT_R8G8_UNORM, - eR8G8Snorm = VK_FORMAT_R8G8_SNORM, - eR8G8Uscaled = VK_FORMAT_R8G8_USCALED, - eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED, - eR8G8Uint = VK_FORMAT_R8G8_UINT, - eR8G8Sint = VK_FORMAT_R8G8_SINT, - eR8G8Srgb = VK_FORMAT_R8G8_SRGB, - eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM, - eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM, - eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED, - eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED, - eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT, - eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT, - eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB, - eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM, - eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM, - eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED, - eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED, - eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT, - eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT, - eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB, - eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM, - eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM, - eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED, - eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED, - eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT, - eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT, - eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB, - eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM, - eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM, - eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED, - eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED, - eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT, - eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT, - eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB, - eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32, - eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32, - eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32, - eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32, - eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32, - eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32, - eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32, - eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32, - eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32, - eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32, - eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32, - eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32, - eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32, - eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32, - eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32, - eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32, - eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32, - eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32, - eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32, - eR16Unorm = VK_FORMAT_R16_UNORM, - eR16Snorm = VK_FORMAT_R16_SNORM, - eR16Uscaled = VK_FORMAT_R16_USCALED, - 
eR16Sscaled = VK_FORMAT_R16_SSCALED, - eR16Uint = VK_FORMAT_R16_UINT, - eR16Sint = VK_FORMAT_R16_SINT, - eR16Sfloat = VK_FORMAT_R16_SFLOAT, - eR16G16Unorm = VK_FORMAT_R16G16_UNORM, - eR16G16Snorm = VK_FORMAT_R16G16_SNORM, - eR16G16Uscaled = VK_FORMAT_R16G16_USCALED, - eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED, - eR16G16Uint = VK_FORMAT_R16G16_UINT, - eR16G16Sint = VK_FORMAT_R16G16_SINT, - eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT, - eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM, - eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM, - eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED, - eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED, - eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT, - eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT, - eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT, - eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM, - eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM, - eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED, - eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED, - eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT, - eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT, - eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT, - eR32Uint = VK_FORMAT_R32_UINT, - eR32Sint = VK_FORMAT_R32_SINT, - eR32Sfloat = VK_FORMAT_R32_SFLOAT, - eR32G32Uint = VK_FORMAT_R32G32_UINT, - eR32G32Sint = VK_FORMAT_R32G32_SINT, - eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT, - eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT, - eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT, - eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT, - eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT, - eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT, - eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT, - eR64Uint = VK_FORMAT_R64_UINT, - eR64Sint = VK_FORMAT_R64_SINT, - eR64Sfloat = VK_FORMAT_R64_SFLOAT, - eR64G64Uint = VK_FORMAT_R64G64_UINT, - eR64G64Sint = VK_FORMAT_R64G64_SINT, - eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT, - eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT, - eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT, - eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT, - eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT, - eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT, - eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT, - eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32, - eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, - eD16Unorm = VK_FORMAT_D16_UNORM, - eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32, - eD32Sfloat = VK_FORMAT_D32_SFLOAT, - eS8Uint = VK_FORMAT_S8_UINT, - eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT, - eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT, - eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT, - eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK, - eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK, - eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK, - eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK, - eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK, - eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK, - eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK, - eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK, - eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK, - eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK, - eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK, - eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK, - eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK, - eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK, - eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK, - eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK, - eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, - eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, - eEtc2R8G8B8A1UnormBlock = 
VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, - eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, - eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, - eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, - eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK, - eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK, - eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK, - eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK, - eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK, - eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK, - eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK, - eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK, - eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK, - eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK, - eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK, - eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK, - eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK, - eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK, - eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK, - eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK, - eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK, - eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK, - eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK, - eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK, - eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK, - eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK, - eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK, - eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK, - eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK, - eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK, - eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK, - eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK, - eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK, - eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK, - eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK, - eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, - eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM, - eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM, - eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, - eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, - eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, - eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, - eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, - eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16, - eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, - eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, - eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, - eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, - eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, - eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, - eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, - eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, - eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, - eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16, - eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, - eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, - eG12X4B12X4G12X4R12X4422Unorm4Pack16 = 
VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, - eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, - eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, - eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, - eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, - eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, - eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, - eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM, - eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM, - eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, - eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, - eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, - eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, - eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, - ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, - ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, - ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, - ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, - ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, - ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, - ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, - ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, - eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, - eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, - eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, - eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, - eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, - eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, - eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, - eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, - eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, - eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, - eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, - eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, - eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, - eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, - eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR, - eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR, - eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, - eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, - eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, - eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, - eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, - eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR, - eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, - eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, - eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, - eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, + eUndefined = VK_FORMAT_UNDEFINED, + eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8, + eR4G4B4A4UnormPack16 = 
VK_FORMAT_R4G4B4A4_UNORM_PACK16, + eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16, + eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16, + eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16, + eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16, + eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16, + eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16, + eR8Unorm = VK_FORMAT_R8_UNORM, + eR8Snorm = VK_FORMAT_R8_SNORM, + eR8Uscaled = VK_FORMAT_R8_USCALED, + eR8Sscaled = VK_FORMAT_R8_SSCALED, + eR8Uint = VK_FORMAT_R8_UINT, + eR8Sint = VK_FORMAT_R8_SINT, + eR8Srgb = VK_FORMAT_R8_SRGB, + eR8G8Unorm = VK_FORMAT_R8G8_UNORM, + eR8G8Snorm = VK_FORMAT_R8G8_SNORM, + eR8G8Uscaled = VK_FORMAT_R8G8_USCALED, + eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED, + eR8G8Uint = VK_FORMAT_R8G8_UINT, + eR8G8Sint = VK_FORMAT_R8G8_SINT, + eR8G8Srgb = VK_FORMAT_R8G8_SRGB, + eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM, + eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM, + eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED, + eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED, + eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT, + eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT, + eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB, + eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM, + eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM, + eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED, + eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED, + eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT, + eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT, + eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB, + eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM, + eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM, + eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED, + eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED, + eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT, + eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT, + eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB, + eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM, + eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM, + eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED, + eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED, + eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT, + eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT, + eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB, + eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32, + eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32, + eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32, + eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32, + eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32, + eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32, + eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32, + eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32, + eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32, + eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32, + eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32, + eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32, + eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32, + eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32, + eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32, + eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32, + eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32, + eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32, + eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32, + eR16Unorm = VK_FORMAT_R16_UNORM, + eR16Snorm = VK_FORMAT_R16_SNORM, + eR16Uscaled = VK_FORMAT_R16_USCALED, + eR16Sscaled = VK_FORMAT_R16_SSCALED, + eR16Uint = VK_FORMAT_R16_UINT, + eR16Sint = VK_FORMAT_R16_SINT, + eR16Sfloat = VK_FORMAT_R16_SFLOAT, + eR16G16Unorm = VK_FORMAT_R16G16_UNORM, 
+ eR16G16Snorm = VK_FORMAT_R16G16_SNORM, + eR16G16Uscaled = VK_FORMAT_R16G16_USCALED, + eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED, + eR16G16Uint = VK_FORMAT_R16G16_UINT, + eR16G16Sint = VK_FORMAT_R16G16_SINT, + eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT, + eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM, + eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM, + eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED, + eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED, + eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT, + eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT, + eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT, + eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM, + eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM, + eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED, + eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED, + eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT, + eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT, + eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT, + eR32Uint = VK_FORMAT_R32_UINT, + eR32Sint = VK_FORMAT_R32_SINT, + eR32Sfloat = VK_FORMAT_R32_SFLOAT, + eR32G32Uint = VK_FORMAT_R32G32_UINT, + eR32G32Sint = VK_FORMAT_R32G32_SINT, + eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT, + eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT, + eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT, + eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT, + eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT, + eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT, + eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT, + eR64Uint = VK_FORMAT_R64_UINT, + eR64Sint = VK_FORMAT_R64_SINT, + eR64Sfloat = VK_FORMAT_R64_SFLOAT, + eR64G64Uint = VK_FORMAT_R64G64_UINT, + eR64G64Sint = VK_FORMAT_R64G64_SINT, + eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT, + eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT, + eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT, + eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT, + eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT, + eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT, + eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT, + eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32, + eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, + eD16Unorm = VK_FORMAT_D16_UNORM, + eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32, + eD32Sfloat = VK_FORMAT_D32_SFLOAT, + eS8Uint = VK_FORMAT_S8_UINT, + eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT, + eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT, + eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT, + eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK, + eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK, + eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK, + eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK, + eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK, + eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK, + eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK, + eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK, + eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK, + eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK, + eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK, + eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK, + eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK, + eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK, + eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK, + eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK, + eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, + eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, + eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, + eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, + eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, + eEtc2R8G8B8A8SrgbBlock = 
VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, + eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK, + eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK, + eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK, + eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK, + eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK, + eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK, + eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK, + eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK, + eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK, + eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK, + eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK, + eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK, + eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK, + eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK, + eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK, + eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK, + eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK, + eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK, + eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK, + eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK, + eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK, + eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK, + eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK, + eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK, + eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK, + eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK, + eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK, + eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK, + eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK, + eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK, + eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK, + eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM, + eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM, + eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16, + eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16, + eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + eG12X4B12X4R12X43Plane420Unorm3Pack16 = 
VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM, + eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM, + eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, + ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, + ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, + ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, + ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, + ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, + ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, + ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, + eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, + eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, + eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, + eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, + eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, + eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, + eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, + eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, + eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, + eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, + eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, + eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, + eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, + eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, + eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR, + eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR, + eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, + eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, + eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, + eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, + eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, + eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR, + eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, + eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, + eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, + eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, 
eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, - eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR, - eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, - eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, - eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, - eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, + eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR, + eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, + eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, + eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, + eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR, eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR, eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR, - eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR, - eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR, - eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, - eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, - eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, - eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, - eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR + eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR, + eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR, + eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, + eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, + eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, + eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, + eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR }; VULKAN_HPP_INLINE std::string to_string( Format value ) { switch ( value ) { - case Format::eUndefined : return "Undefined"; - case Format::eR4G4UnormPack8 : return "R4G4UnormPack8"; - case Format::eR4G4B4A4UnormPack16 : return "R4G4B4A4UnormPack16"; - case Format::eB4G4R4A4UnormPack16 : return "B4G4R4A4UnormPack16"; - case Format::eR5G6B5UnormPack16 : return "R5G6B5UnormPack16"; - case Format::eB5G6R5UnormPack16 : return "B5G6R5UnormPack16"; - case Format::eR5G5B5A1UnormPack16 : return "R5G5B5A1UnormPack16"; - case Format::eB5G5R5A1UnormPack16 : return "B5G5R5A1UnormPack16"; - case Format::eA1R5G5B5UnormPack16 : return "A1R5G5B5UnormPack16"; - case Format::eR8Unorm : return "R8Unorm"; - case Format::eR8Snorm : return "R8Snorm"; - case Format::eR8Uscaled : return "R8Uscaled"; - case Format::eR8Sscaled : return "R8Sscaled"; - case Format::eR8Uint : return "R8Uint"; - case Format::eR8Sint : return "R8Sint"; - case Format::eR8Srgb : return "R8Srgb"; - case Format::eR8G8Unorm : 
return "R8G8Unorm"; - case Format::eR8G8Snorm : return "R8G8Snorm"; - case Format::eR8G8Uscaled : return "R8G8Uscaled"; - case Format::eR8G8Sscaled : return "R8G8Sscaled"; - case Format::eR8G8Uint : return "R8G8Uint"; - case Format::eR8G8Sint : return "R8G8Sint"; - case Format::eR8G8Srgb : return "R8G8Srgb"; - case Format::eR8G8B8Unorm : return "R8G8B8Unorm"; - case Format::eR8G8B8Snorm : return "R8G8B8Snorm"; - case Format::eR8G8B8Uscaled : return "R8G8B8Uscaled"; - case Format::eR8G8B8Sscaled : return "R8G8B8Sscaled"; - case Format::eR8G8B8Uint : return "R8G8B8Uint"; - case Format::eR8G8B8Sint : return "R8G8B8Sint"; - case Format::eR8G8B8Srgb : return "R8G8B8Srgb"; - case Format::eB8G8R8Unorm : return "B8G8R8Unorm"; - case Format::eB8G8R8Snorm : return "B8G8R8Snorm"; - case Format::eB8G8R8Uscaled : return "B8G8R8Uscaled"; - case Format::eB8G8R8Sscaled : return "B8G8R8Sscaled"; - case Format::eB8G8R8Uint : return "B8G8R8Uint"; - case Format::eB8G8R8Sint : return "B8G8R8Sint"; - case Format::eB8G8R8Srgb : return "B8G8R8Srgb"; - case Format::eR8G8B8A8Unorm : return "R8G8B8A8Unorm"; - case Format::eR8G8B8A8Snorm : return "R8G8B8A8Snorm"; - case Format::eR8G8B8A8Uscaled : return "R8G8B8A8Uscaled"; - case Format::eR8G8B8A8Sscaled : return "R8G8B8A8Sscaled"; - case Format::eR8G8B8A8Uint : return "R8G8B8A8Uint"; - case Format::eR8G8B8A8Sint : return "R8G8B8A8Sint"; - case Format::eR8G8B8A8Srgb : return "R8G8B8A8Srgb"; - case Format::eB8G8R8A8Unorm : return "B8G8R8A8Unorm"; - case Format::eB8G8R8A8Snorm : return "B8G8R8A8Snorm"; - case Format::eB8G8R8A8Uscaled : return "B8G8R8A8Uscaled"; - case Format::eB8G8R8A8Sscaled : return "B8G8R8A8Sscaled"; - case Format::eB8G8R8A8Uint : return "B8G8R8A8Uint"; - case Format::eB8G8R8A8Sint : return "B8G8R8A8Sint"; - case Format::eB8G8R8A8Srgb : return "B8G8R8A8Srgb"; - case Format::eA8B8G8R8UnormPack32 : return "A8B8G8R8UnormPack32"; - case Format::eA8B8G8R8SnormPack32 : return "A8B8G8R8SnormPack32"; - case Format::eA8B8G8R8UscaledPack32 : return "A8B8G8R8UscaledPack32"; - case Format::eA8B8G8R8SscaledPack32 : return "A8B8G8R8SscaledPack32"; - case Format::eA8B8G8R8UintPack32 : return "A8B8G8R8UintPack32"; - case Format::eA8B8G8R8SintPack32 : return "A8B8G8R8SintPack32"; - case Format::eA8B8G8R8SrgbPack32 : return "A8B8G8R8SrgbPack32"; - case Format::eA2R10G10B10UnormPack32 : return "A2R10G10B10UnormPack32"; - case Format::eA2R10G10B10SnormPack32 : return "A2R10G10B10SnormPack32"; - case Format::eA2R10G10B10UscaledPack32 : return "A2R10G10B10UscaledPack32"; - case Format::eA2R10G10B10SscaledPack32 : return "A2R10G10B10SscaledPack32"; - case Format::eA2R10G10B10UintPack32 : return "A2R10G10B10UintPack32"; - case Format::eA2R10G10B10SintPack32 : return "A2R10G10B10SintPack32"; - case Format::eA2B10G10R10UnormPack32 : return "A2B10G10R10UnormPack32"; - case Format::eA2B10G10R10SnormPack32 : return "A2B10G10R10SnormPack32"; - case Format::eA2B10G10R10UscaledPack32 : return "A2B10G10R10UscaledPack32"; - case Format::eA2B10G10R10SscaledPack32 : return "A2B10G10R10SscaledPack32"; - case Format::eA2B10G10R10UintPack32 : return "A2B10G10R10UintPack32"; - case Format::eA2B10G10R10SintPack32 : return "A2B10G10R10SintPack32"; - case Format::eR16Unorm : return "R16Unorm"; - case Format::eR16Snorm : return "R16Snorm"; - case Format::eR16Uscaled : return "R16Uscaled"; - case Format::eR16Sscaled : return "R16Sscaled"; - case Format::eR16Uint : return "R16Uint"; - case Format::eR16Sint : return "R16Sint"; - case Format::eR16Sfloat : return "R16Sfloat"; - case 
Format::eR16G16Unorm : return "R16G16Unorm"; - case Format::eR16G16Snorm : return "R16G16Snorm"; - case Format::eR16G16Uscaled : return "R16G16Uscaled"; - case Format::eR16G16Sscaled : return "R16G16Sscaled"; - case Format::eR16G16Uint : return "R16G16Uint"; - case Format::eR16G16Sint : return "R16G16Sint"; - case Format::eR16G16Sfloat : return "R16G16Sfloat"; - case Format::eR16G16B16Unorm : return "R16G16B16Unorm"; - case Format::eR16G16B16Snorm : return "R16G16B16Snorm"; - case Format::eR16G16B16Uscaled : return "R16G16B16Uscaled"; - case Format::eR16G16B16Sscaled : return "R16G16B16Sscaled"; - case Format::eR16G16B16Uint : return "R16G16B16Uint"; - case Format::eR16G16B16Sint : return "R16G16B16Sint"; - case Format::eR16G16B16Sfloat : return "R16G16B16Sfloat"; - case Format::eR16G16B16A16Unorm : return "R16G16B16A16Unorm"; - case Format::eR16G16B16A16Snorm : return "R16G16B16A16Snorm"; - case Format::eR16G16B16A16Uscaled : return "R16G16B16A16Uscaled"; - case Format::eR16G16B16A16Sscaled : return "R16G16B16A16Sscaled"; - case Format::eR16G16B16A16Uint : return "R16G16B16A16Uint"; - case Format::eR16G16B16A16Sint : return "R16G16B16A16Sint"; - case Format::eR16G16B16A16Sfloat : return "R16G16B16A16Sfloat"; - case Format::eR32Uint : return "R32Uint"; - case Format::eR32Sint : return "R32Sint"; - case Format::eR32Sfloat : return "R32Sfloat"; - case Format::eR32G32Uint : return "R32G32Uint"; - case Format::eR32G32Sint : return "R32G32Sint"; - case Format::eR32G32Sfloat : return "R32G32Sfloat"; - case Format::eR32G32B32Uint : return "R32G32B32Uint"; - case Format::eR32G32B32Sint : return "R32G32B32Sint"; - case Format::eR32G32B32Sfloat : return "R32G32B32Sfloat"; - case Format::eR32G32B32A32Uint : return "R32G32B32A32Uint"; - case Format::eR32G32B32A32Sint : return "R32G32B32A32Sint"; - case Format::eR32G32B32A32Sfloat : return "R32G32B32A32Sfloat"; - case Format::eR64Uint : return "R64Uint"; - case Format::eR64Sint : return "R64Sint"; - case Format::eR64Sfloat : return "R64Sfloat"; - case Format::eR64G64Uint : return "R64G64Uint"; - case Format::eR64G64Sint : return "R64G64Sint"; - case Format::eR64G64Sfloat : return "R64G64Sfloat"; - case Format::eR64G64B64Uint : return "R64G64B64Uint"; - case Format::eR64G64B64Sint : return "R64G64B64Sint"; - case Format::eR64G64B64Sfloat : return "R64G64B64Sfloat"; - case Format::eR64G64B64A64Uint : return "R64G64B64A64Uint"; - case Format::eR64G64B64A64Sint : return "R64G64B64A64Sint"; - case Format::eR64G64B64A64Sfloat : return "R64G64B64A64Sfloat"; - case Format::eB10G11R11UfloatPack32 : return "B10G11R11UfloatPack32"; - case Format::eE5B9G9R9UfloatPack32 : return "E5B9G9R9UfloatPack32"; - case Format::eD16Unorm : return "D16Unorm"; - case Format::eX8D24UnormPack32 : return "X8D24UnormPack32"; - case Format::eD32Sfloat : return "D32Sfloat"; - case Format::eS8Uint : return "S8Uint"; - case Format::eD16UnormS8Uint : return "D16UnormS8Uint"; - case Format::eD24UnormS8Uint : return "D24UnormS8Uint"; - case Format::eD32SfloatS8Uint : return "D32SfloatS8Uint"; - case Format::eBc1RgbUnormBlock : return "Bc1RgbUnormBlock"; - case Format::eBc1RgbSrgbBlock : return "Bc1RgbSrgbBlock"; - case Format::eBc1RgbaUnormBlock : return "Bc1RgbaUnormBlock"; - case Format::eBc1RgbaSrgbBlock : return "Bc1RgbaSrgbBlock"; - case Format::eBc2UnormBlock : return "Bc2UnormBlock"; - case Format::eBc2SrgbBlock : return "Bc2SrgbBlock"; - case Format::eBc3UnormBlock : return "Bc3UnormBlock"; - case Format::eBc3SrgbBlock : return "Bc3SrgbBlock"; - case Format::eBc4UnormBlock : 
return "Bc4UnormBlock"; - case Format::eBc4SnormBlock : return "Bc4SnormBlock"; - case Format::eBc5UnormBlock : return "Bc5UnormBlock"; - case Format::eBc5SnormBlock : return "Bc5SnormBlock"; - case Format::eBc6HUfloatBlock : return "Bc6HUfloatBlock"; - case Format::eBc6HSfloatBlock : return "Bc6HSfloatBlock"; - case Format::eBc7UnormBlock : return "Bc7UnormBlock"; - case Format::eBc7SrgbBlock : return "Bc7SrgbBlock"; - case Format::eEtc2R8G8B8UnormBlock : return "Etc2R8G8B8UnormBlock"; - case Format::eEtc2R8G8B8SrgbBlock : return "Etc2R8G8B8SrgbBlock"; - case Format::eEtc2R8G8B8A1UnormBlock : return "Etc2R8G8B8A1UnormBlock"; - case Format::eEtc2R8G8B8A1SrgbBlock : return "Etc2R8G8B8A1SrgbBlock"; - case Format::eEtc2R8G8B8A8UnormBlock : return "Etc2R8G8B8A8UnormBlock"; - case Format::eEtc2R8G8B8A8SrgbBlock : return "Etc2R8G8B8A8SrgbBlock"; - case Format::eEacR11UnormBlock : return "EacR11UnormBlock"; - case Format::eEacR11SnormBlock : return "EacR11SnormBlock"; - case Format::eEacR11G11UnormBlock : return "EacR11G11UnormBlock"; - case Format::eEacR11G11SnormBlock : return "EacR11G11SnormBlock"; - case Format::eAstc4x4UnormBlock : return "Astc4x4UnormBlock"; - case Format::eAstc4x4SrgbBlock : return "Astc4x4SrgbBlock"; - case Format::eAstc5x4UnormBlock : return "Astc5x4UnormBlock"; - case Format::eAstc5x4SrgbBlock : return "Astc5x4SrgbBlock"; - case Format::eAstc5x5UnormBlock : return "Astc5x5UnormBlock"; - case Format::eAstc5x5SrgbBlock : return "Astc5x5SrgbBlock"; - case Format::eAstc6x5UnormBlock : return "Astc6x5UnormBlock"; - case Format::eAstc6x5SrgbBlock : return "Astc6x5SrgbBlock"; - case Format::eAstc6x6UnormBlock : return "Astc6x6UnormBlock"; - case Format::eAstc6x6SrgbBlock : return "Astc6x6SrgbBlock"; - case Format::eAstc8x5UnormBlock : return "Astc8x5UnormBlock"; - case Format::eAstc8x5SrgbBlock : return "Astc8x5SrgbBlock"; - case Format::eAstc8x6UnormBlock : return "Astc8x6UnormBlock"; - case Format::eAstc8x6SrgbBlock : return "Astc8x6SrgbBlock"; - case Format::eAstc8x8UnormBlock : return "Astc8x8UnormBlock"; - case Format::eAstc8x8SrgbBlock : return "Astc8x8SrgbBlock"; - case Format::eAstc10x5UnormBlock : return "Astc10x5UnormBlock"; - case Format::eAstc10x5SrgbBlock : return "Astc10x5SrgbBlock"; - case Format::eAstc10x6UnormBlock : return "Astc10x6UnormBlock"; - case Format::eAstc10x6SrgbBlock : return "Astc10x6SrgbBlock"; - case Format::eAstc10x8UnormBlock : return "Astc10x8UnormBlock"; - case Format::eAstc10x8SrgbBlock : return "Astc10x8SrgbBlock"; - case Format::eAstc10x10UnormBlock : return "Astc10x10UnormBlock"; - case Format::eAstc10x10SrgbBlock : return "Astc10x10SrgbBlock"; - case Format::eAstc12x10UnormBlock : return "Astc12x10UnormBlock"; - case Format::eAstc12x10SrgbBlock : return "Astc12x10SrgbBlock"; - case Format::eAstc12x12UnormBlock : return "Astc12x12UnormBlock"; - case Format::eAstc12x12SrgbBlock : return "Astc12x12SrgbBlock"; - case Format::eG8B8G8R8422Unorm : return "G8B8G8R8422Unorm"; - case Format::eB8G8R8G8422Unorm : return "B8G8R8G8422Unorm"; - case Format::eG8B8R83Plane420Unorm : return "G8B8R83Plane420Unorm"; - case Format::eG8B8R82Plane420Unorm : return "G8B8R82Plane420Unorm"; - case Format::eG8B8R83Plane422Unorm : return "G8B8R83Plane422Unorm"; - case Format::eG8B8R82Plane422Unorm : return "G8B8R82Plane422Unorm"; - case Format::eG8B8R83Plane444Unorm : return "G8B8R83Plane444Unorm"; - case Format::eR10X6UnormPack16 : return "R10X6UnormPack16"; - case Format::eR10X6G10X6Unorm2Pack16 : return "R10X6G10X6Unorm2Pack16"; - case 
Format::eR10X6G10X6B10X6A10X6Unorm4Pack16 : return "R10X6G10X6B10X6A10X6Unorm4Pack16"; - case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16 : return "G10X6B10X6G10X6R10X6422Unorm4Pack16"; - case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16 : return "B10X6G10X6R10X6G10X6422Unorm4Pack16"; - case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16 : return "G10X6B10X6R10X63Plane420Unorm3Pack16"; - case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16 : return "G10X6B10X6R10X62Plane420Unorm3Pack16"; - case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16 : return "G10X6B10X6R10X63Plane422Unorm3Pack16"; - case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16 : return "G10X6B10X6R10X62Plane422Unorm3Pack16"; - case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16 : return "G10X6B10X6R10X63Plane444Unorm3Pack16"; - case Format::eR12X4UnormPack16 : return "R12X4UnormPack16"; - case Format::eR12X4G12X4Unorm2Pack16 : return "R12X4G12X4Unorm2Pack16"; - case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16 : return "R12X4G12X4B12X4A12X4Unorm4Pack16"; - case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16 : return "G12X4B12X4G12X4R12X4422Unorm4Pack16"; - case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16 : return "B12X4G12X4R12X4G12X4422Unorm4Pack16"; - case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16 : return "G12X4B12X4R12X43Plane420Unorm3Pack16"; - case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16 : return "G12X4B12X4R12X42Plane420Unorm3Pack16"; - case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16 : return "G12X4B12X4R12X43Plane422Unorm3Pack16"; - case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16 : return "G12X4B12X4R12X42Plane422Unorm3Pack16"; - case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16 : return "G12X4B12X4R12X43Plane444Unorm3Pack16"; - case Format::eG16B16G16R16422Unorm : return "G16B16G16R16422Unorm"; - case Format::eB16G16R16G16422Unorm : return "B16G16R16G16422Unorm"; - case Format::eG16B16R163Plane420Unorm : return "G16B16R163Plane420Unorm"; - case Format::eG16B16R162Plane420Unorm : return "G16B16R162Plane420Unorm"; - case Format::eG16B16R163Plane422Unorm : return "G16B16R163Plane422Unorm"; - case Format::eG16B16R162Plane422Unorm : return "G16B16R162Plane422Unorm"; - case Format::eG16B16R163Plane444Unorm : return "G16B16R163Plane444Unorm"; - case Format::ePvrtc12BppUnormBlockIMG : return "Pvrtc12BppUnormBlockIMG"; - case Format::ePvrtc14BppUnormBlockIMG : return "Pvrtc14BppUnormBlockIMG"; - case Format::ePvrtc22BppUnormBlockIMG : return "Pvrtc22BppUnormBlockIMG"; - case Format::ePvrtc24BppUnormBlockIMG : return "Pvrtc24BppUnormBlockIMG"; - case Format::ePvrtc12BppSrgbBlockIMG : return "Pvrtc12BppSrgbBlockIMG"; - case Format::ePvrtc14BppSrgbBlockIMG : return "Pvrtc14BppSrgbBlockIMG"; - case Format::ePvrtc22BppSrgbBlockIMG : return "Pvrtc22BppSrgbBlockIMG"; - case Format::ePvrtc24BppSrgbBlockIMG : return "Pvrtc24BppSrgbBlockIMG"; - case Format::eAstc4x4SfloatBlockEXT : return "Astc4x4SfloatBlockEXT"; - case Format::eAstc5x4SfloatBlockEXT : return "Astc5x4SfloatBlockEXT"; - case Format::eAstc5x5SfloatBlockEXT : return "Astc5x5SfloatBlockEXT"; - case Format::eAstc6x5SfloatBlockEXT : return "Astc6x5SfloatBlockEXT"; - case Format::eAstc6x6SfloatBlockEXT : return "Astc6x6SfloatBlockEXT"; - case Format::eAstc8x5SfloatBlockEXT : return "Astc8x5SfloatBlockEXT"; - case Format::eAstc8x6SfloatBlockEXT : return "Astc8x6SfloatBlockEXT"; - case Format::eAstc8x8SfloatBlockEXT : return "Astc8x8SfloatBlockEXT"; - case Format::eAstc10x5SfloatBlockEXT : return "Astc10x5SfloatBlockEXT"; - case Format::eAstc10x6SfloatBlockEXT : return 
"Astc10x6SfloatBlockEXT"; - case Format::eAstc10x8SfloatBlockEXT : return "Astc10x8SfloatBlockEXT"; - case Format::eAstc10x10SfloatBlockEXT : return "Astc10x10SfloatBlockEXT"; - case Format::eAstc12x10SfloatBlockEXT : return "Astc12x10SfloatBlockEXT"; - case Format::eAstc12x12SfloatBlockEXT : return "Astc12x12SfloatBlockEXT"; + case Format::eUndefined: return "Undefined"; + case Format::eR4G4UnormPack8: return "R4G4UnormPack8"; + case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16"; + case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16"; + case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16"; + case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16"; + case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16"; + case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16"; + case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16"; + case Format::eR8Unorm: return "R8Unorm"; + case Format::eR8Snorm: return "R8Snorm"; + case Format::eR8Uscaled: return "R8Uscaled"; + case Format::eR8Sscaled: return "R8Sscaled"; + case Format::eR8Uint: return "R8Uint"; + case Format::eR8Sint: return "R8Sint"; + case Format::eR8Srgb: return "R8Srgb"; + case Format::eR8G8Unorm: return "R8G8Unorm"; + case Format::eR8G8Snorm: return "R8G8Snorm"; + case Format::eR8G8Uscaled: return "R8G8Uscaled"; + case Format::eR8G8Sscaled: return "R8G8Sscaled"; + case Format::eR8G8Uint: return "R8G8Uint"; + case Format::eR8G8Sint: return "R8G8Sint"; + case Format::eR8G8Srgb: return "R8G8Srgb"; + case Format::eR8G8B8Unorm: return "R8G8B8Unorm"; + case Format::eR8G8B8Snorm: return "R8G8B8Snorm"; + case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled"; + case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled"; + case Format::eR8G8B8Uint: return "R8G8B8Uint"; + case Format::eR8G8B8Sint: return "R8G8B8Sint"; + case Format::eR8G8B8Srgb: return "R8G8B8Srgb"; + case Format::eB8G8R8Unorm: return "B8G8R8Unorm"; + case Format::eB8G8R8Snorm: return "B8G8R8Snorm"; + case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled"; + case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled"; + case Format::eB8G8R8Uint: return "B8G8R8Uint"; + case Format::eB8G8R8Sint: return "B8G8R8Sint"; + case Format::eB8G8R8Srgb: return "B8G8R8Srgb"; + case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm"; + case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm"; + case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled"; + case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled"; + case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint"; + case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint"; + case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb"; + case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm"; + case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm"; + case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled"; + case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled"; + case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint"; + case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint"; + case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb"; + case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32"; + case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32"; + case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32"; + case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32"; + case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32"; + case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32"; + case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32"; + case Format::eA2R10G10B10UnormPack32: return 
"A2R10G10B10UnormPack32"; + case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32"; + case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32"; + case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32"; + case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32"; + case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32"; + case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32"; + case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32"; + case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32"; + case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32"; + case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32"; + case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32"; + case Format::eR16Unorm: return "R16Unorm"; + case Format::eR16Snorm: return "R16Snorm"; + case Format::eR16Uscaled: return "R16Uscaled"; + case Format::eR16Sscaled: return "R16Sscaled"; + case Format::eR16Uint: return "R16Uint"; + case Format::eR16Sint: return "R16Sint"; + case Format::eR16Sfloat: return "R16Sfloat"; + case Format::eR16G16Unorm: return "R16G16Unorm"; + case Format::eR16G16Snorm: return "R16G16Snorm"; + case Format::eR16G16Uscaled: return "R16G16Uscaled"; + case Format::eR16G16Sscaled: return "R16G16Sscaled"; + case Format::eR16G16Uint: return "R16G16Uint"; + case Format::eR16G16Sint: return "R16G16Sint"; + case Format::eR16G16Sfloat: return "R16G16Sfloat"; + case Format::eR16G16B16Unorm: return "R16G16B16Unorm"; + case Format::eR16G16B16Snorm: return "R16G16B16Snorm"; + case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled"; + case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled"; + case Format::eR16G16B16Uint: return "R16G16B16Uint"; + case Format::eR16G16B16Sint: return "R16G16B16Sint"; + case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat"; + case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm"; + case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm"; + case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled"; + case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled"; + case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint"; + case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint"; + case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat"; + case Format::eR32Uint: return "R32Uint"; + case Format::eR32Sint: return "R32Sint"; + case Format::eR32Sfloat: return "R32Sfloat"; + case Format::eR32G32Uint: return "R32G32Uint"; + case Format::eR32G32Sint: return "R32G32Sint"; + case Format::eR32G32Sfloat: return "R32G32Sfloat"; + case Format::eR32G32B32Uint: return "R32G32B32Uint"; + case Format::eR32G32B32Sint: return "R32G32B32Sint"; + case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat"; + case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint"; + case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint"; + case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat"; + case Format::eR64Uint: return "R64Uint"; + case Format::eR64Sint: return "R64Sint"; + case Format::eR64Sfloat: return "R64Sfloat"; + case Format::eR64G64Uint: return "R64G64Uint"; + case Format::eR64G64Sint: return "R64G64Sint"; + case Format::eR64G64Sfloat: return "R64G64Sfloat"; + case Format::eR64G64B64Uint: return "R64G64B64Uint"; + case Format::eR64G64B64Sint: return "R64G64B64Sint"; + case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat"; + case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint"; + case 
Format::eR64G64B64A64Sint: return "R64G64B64A64Sint"; + case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat"; + case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32"; + case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32"; + case Format::eD16Unorm: return "D16Unorm"; + case Format::eX8D24UnormPack32: return "X8D24UnormPack32"; + case Format::eD32Sfloat: return "D32Sfloat"; + case Format::eS8Uint: return "S8Uint"; + case Format::eD16UnormS8Uint: return "D16UnormS8Uint"; + case Format::eD24UnormS8Uint: return "D24UnormS8Uint"; + case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint"; + case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock"; + case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock"; + case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock"; + case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock"; + case Format::eBc2UnormBlock: return "Bc2UnormBlock"; + case Format::eBc2SrgbBlock: return "Bc2SrgbBlock"; + case Format::eBc3UnormBlock: return "Bc3UnormBlock"; + case Format::eBc3SrgbBlock: return "Bc3SrgbBlock"; + case Format::eBc4UnormBlock: return "Bc4UnormBlock"; + case Format::eBc4SnormBlock: return "Bc4SnormBlock"; + case Format::eBc5UnormBlock: return "Bc5UnormBlock"; + case Format::eBc5SnormBlock: return "Bc5SnormBlock"; + case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock"; + case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock"; + case Format::eBc7UnormBlock: return "Bc7UnormBlock"; + case Format::eBc7SrgbBlock: return "Bc7SrgbBlock"; + case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock"; + case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock"; + case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock"; + case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock"; + case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock"; + case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock"; + case Format::eEacR11UnormBlock: return "EacR11UnormBlock"; + case Format::eEacR11SnormBlock: return "EacR11SnormBlock"; + case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock"; + case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock"; + case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock"; + case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock"; + case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock"; + case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock"; + case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock"; + case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock"; + case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock"; + case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock"; + case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock"; + case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock"; + case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock"; + case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock"; + case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock"; + case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock"; + case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock"; + case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock"; + case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock"; + case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock"; + case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock"; + case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock"; + case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock"; + case 
Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock"; + case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock"; + case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock"; + case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock"; + case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock"; + case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock"; + case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock"; + case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm"; + case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm"; + case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm"; + case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm"; + case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm"; + case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm"; + case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm"; + case Format::eR10X6UnormPack16: return "R10X6UnormPack16"; + case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16"; + case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16"; + case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16"; + case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16"; + case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16"; + case Format::eR12X4UnormPack16: return "R12X4UnormPack16"; + case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16"; + case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16"; + case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16"; + case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16"; + case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "G12X4B12X4R12X42Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16"; + case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm"; + case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm"; + case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm"; + case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm"; + case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm"; + case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm"; + case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm"; + case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG"; + case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG"; + case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG"; + case 
Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG"; + case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG"; + case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG"; + case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG"; + case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG"; + case Format::eAstc4x4SfloatBlockEXT: return "Astc4x4SfloatBlockEXT"; + case Format::eAstc5x4SfloatBlockEXT: return "Astc5x4SfloatBlockEXT"; + case Format::eAstc5x5SfloatBlockEXT: return "Astc5x5SfloatBlockEXT"; + case Format::eAstc6x5SfloatBlockEXT: return "Astc6x5SfloatBlockEXT"; + case Format::eAstc6x6SfloatBlockEXT: return "Astc6x6SfloatBlockEXT"; + case Format::eAstc8x5SfloatBlockEXT: return "Astc8x5SfloatBlockEXT"; + case Format::eAstc8x6SfloatBlockEXT: return "Astc8x6SfloatBlockEXT"; + case Format::eAstc8x8SfloatBlockEXT: return "Astc8x8SfloatBlockEXT"; + case Format::eAstc10x5SfloatBlockEXT: return "Astc10x5SfloatBlockEXT"; + case Format::eAstc10x6SfloatBlockEXT: return "Astc10x6SfloatBlockEXT"; + case Format::eAstc10x8SfloatBlockEXT: return "Astc10x8SfloatBlockEXT"; + case Format::eAstc10x10SfloatBlockEXT: return "Astc10x10SfloatBlockEXT"; + case Format::eAstc12x10SfloatBlockEXT: return "Astc12x10SfloatBlockEXT"; + case Format::eAstc12x12SfloatBlockEXT: return "Astc12x12SfloatBlockEXT"; default: return "invalid"; } } enum class FormatFeatureFlagBits : VkFormatFeatureFlags { - eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, - eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, - eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, - eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, - eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, - eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, - eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, - eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, - eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, - eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, - eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT, - eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT, - eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, - eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, - eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, - eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, + eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, + eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, + eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, + eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, + eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, + eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, + eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, + eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, + eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT, + eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT, + eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, + eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, eSampledImageYcbcrConversionLinearFilter = 
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, - eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, - eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, - eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, - eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT, - eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, - eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, - eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, + eSampledImageYcbcrConversionSeparateReconstructionFilter = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicit = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT, + eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, + eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, - eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT, - eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR, - eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, - eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, - eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR, - eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR, - eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR, - eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR, - eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR, - eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR, - eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, + eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT, + eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR, + eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, + eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, + eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR, + eSampledImageYcbcrConversionLinearFilterKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR, + eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR, + eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR, + 
eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR, + eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR, + eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT }; @@ -5416,39 +6722,43 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case FormatFeatureFlagBits::eSampledImage : return "SampledImage"; - case FormatFeatureFlagBits::eStorageImage : return "StorageImage"; - case FormatFeatureFlagBits::eStorageImageAtomic : return "StorageImageAtomic"; - case FormatFeatureFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer"; - case FormatFeatureFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer"; - case FormatFeatureFlagBits::eStorageTexelBufferAtomic : return "StorageTexelBufferAtomic"; - case FormatFeatureFlagBits::eVertexBuffer : return "VertexBuffer"; - case FormatFeatureFlagBits::eColorAttachment : return "ColorAttachment"; - case FormatFeatureFlagBits::eColorAttachmentBlend : return "ColorAttachmentBlend"; - case FormatFeatureFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment"; - case FormatFeatureFlagBits::eBlitSrc : return "BlitSrc"; - case FormatFeatureFlagBits::eBlitDst : return "BlitDst"; - case FormatFeatureFlagBits::eSampledImageFilterLinear : return "SampledImageFilterLinear"; - case FormatFeatureFlagBits::eTransferSrc : return "TransferSrc"; - case FormatFeatureFlagBits::eTransferDst : return "TransferDst"; - case FormatFeatureFlagBits::eMidpointChromaSamples : return "MidpointChromaSamples"; - case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter : return "SampledImageYcbcrConversionLinearFilter"; - case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter : return "SampledImageYcbcrConversionSeparateReconstructionFilter"; - case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit : return "SampledImageYcbcrConversionChromaReconstructionExplicit"; - case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable : return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable"; - case FormatFeatureFlagBits::eDisjoint : return "Disjoint"; - case FormatFeatureFlagBits::eCositedChromaSamples : return "CositedChromaSamples"; - case FormatFeatureFlagBits::eSampledImageFilterMinmax : return "SampledImageFilterMinmax"; - case FormatFeatureFlagBits::eSampledImageFilterCubicIMG : return "SampledImageFilterCubicIMG"; - case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR : return "AccelerationStructureVertexBufferKHR"; - case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT"; + case FormatFeatureFlagBits::eSampledImage: return "SampledImage"; + case FormatFeatureFlagBits::eStorageImage: return "StorageImage"; + case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic"; + case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic"; + case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer"; + case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment"; + case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend"; + case 
FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; + case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc"; + case FormatFeatureFlagBits::eBlitDst: return "BlitDst"; + case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear"; + case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc"; + case FormatFeatureFlagBits::eTransferDst: return "TransferDst"; + case FormatFeatureFlagBits::eMidpointChromaSamples: return "MidpointChromaSamples"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter: + return "SampledImageYcbcrConversionLinearFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter: + return "SampledImageYcbcrConversionSeparateReconstructionFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit: + return "SampledImageYcbcrConversionChromaReconstructionExplicit"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable: + return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable"; + case FormatFeatureFlagBits::eDisjoint: return "Disjoint"; + case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples"; + case FormatFeatureFlagBits::eSampledImageFilterMinmax: return "SampledImageFilterMinmax"; + case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG"; + case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR"; + case FormatFeatureFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; default: return "invalid"; } } enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags { - eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, + eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR }; @@ -5456,7 +6766,7 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case FramebufferCreateFlagBits::eImageless : return "Imageless"; + case FramebufferCreateFlagBits::eImageless: return "Imageless"; default: return "invalid"; } } @@ -5464,15 +6774,15 @@ namespace VULKAN_HPP_NAMESPACE enum class FrontFace { eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE, - eClockwise = VK_FRONT_FACE_CLOCKWISE + eClockwise = VK_FRONT_FACE_CLOCKWISE }; VULKAN_HPP_INLINE std::string to_string( FrontFace value ) { switch ( value ) { - case FrontFace::eCounterClockwise : return "CounterClockwise"; - case FrontFace::eClockwise : return "Clockwise"; + case FrontFace::eCounterClockwise: return "CounterClockwise"; + case FrontFace::eClockwise: return "Clockwise"; default: return "invalid"; } } @@ -5480,9 +6790,9 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR enum class FullScreenExclusiveEXT { - eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT, - eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT, - eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT, + eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT, + eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT, + eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT, eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT }; @@ -5490,10 +6800,10 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case FullScreenExclusiveEXT::eDefault : return "Default"; - case FullScreenExclusiveEXT::eAllowed : return "Allowed"; - case FullScreenExclusiveEXT::eDisallowed : return "Disallowed"; - case FullScreenExclusiveEXT::eApplicationControlled : return 
"ApplicationControlled"; + case FullScreenExclusiveEXT::eDefault: return "Default"; + case FullScreenExclusiveEXT::eAllowed: return "Allowed"; + case FullScreenExclusiveEXT::eDisallowed: return "Disallowed"; + case FullScreenExclusiveEXT::eApplicationControlled: return "ApplicationControlled"; default: return "invalid"; } } @@ -5501,7 +6811,7 @@ namespace VULKAN_HPP_NAMESPACE enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR { - eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR, + eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR, eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR }; using GeometryFlagBitsNV = GeometryFlagBitsKHR; @@ -5510,19 +6820,19 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case GeometryFlagBitsKHR::eOpaque : return "Opaque"; - case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation"; + case GeometryFlagBitsKHR::eOpaque: return "Opaque"; + case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation: return "NoDuplicateAnyHitInvocation"; default: return "invalid"; } } enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR { - eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, - eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, - eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, - eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV + eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV }; using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR; @@ -5530,10 +6840,10 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable : return "TriangleFacingCullDisable"; - case GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise"; - case GeometryInstanceFlagBitsKHR::eForceOpaque : return "ForceOpaque"; - case GeometryInstanceFlagBitsKHR::eForceNoOpaque : return "ForceNoOpaque"; + case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable: return "TriangleFacingCullDisable"; + case GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise: return "TriangleFrontCounterclockwise"; + case GeometryInstanceFlagBitsKHR::eForceOpaque: return "ForceOpaque"; + case GeometryInstanceFlagBitsKHR::eForceNoOpaque: return "ForceNoOpaque"; default: return "invalid"; } } @@ -5541,7 +6851,7 @@ namespace VULKAN_HPP_NAMESPACE enum class GeometryTypeKHR { eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR, - eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR, + eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR, eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR }; using GeometryTypeNV = GeometryTypeKHR; @@ -5550,158 +6860,158 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case GeometryTypeKHR::eTriangles : return "Triangles"; - case GeometryTypeKHR::eAabbs : return "Aabbs"; - case GeometryTypeKHR::eInstances : return "Instances"; + case GeometryTypeKHR::eTriangles: return "Triangles"; + case GeometryTypeKHR::eAabbs: return "Aabbs"; + case GeometryTypeKHR::eInstances: return "Instances"; default: return "invalid"; } } enum class ImageAspectFlagBits : VkImageAspectFlags { - eColor = VK_IMAGE_ASPECT_COLOR_BIT, - eDepth = VK_IMAGE_ASPECT_DEPTH_BIT, - 
eStencil = VK_IMAGE_ASPECT_STENCIL_BIT, - eMetadata = VK_IMAGE_ASPECT_METADATA_BIT, - ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT, - ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT, - ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT, + eColor = VK_IMAGE_ASPECT_COLOR_BIT, + eDepth = VK_IMAGE_ASPECT_DEPTH_BIT, + eStencil = VK_IMAGE_ASPECT_STENCIL_BIT, + eMetadata = VK_IMAGE_ASPECT_METADATA_BIT, + ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT, + ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT, + ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT, eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT, eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT, eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT, eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT, - ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, - ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, - ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR + ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, + ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, + ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR }; VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value ) { switch ( value ) { - case ImageAspectFlagBits::eColor : return "Color"; - case ImageAspectFlagBits::eDepth : return "Depth"; - case ImageAspectFlagBits::eStencil : return "Stencil"; - case ImageAspectFlagBits::eMetadata : return "Metadata"; - case ImageAspectFlagBits::ePlane0 : return "Plane0"; - case ImageAspectFlagBits::ePlane1 : return "Plane1"; - case ImageAspectFlagBits::ePlane2 : return "Plane2"; - case ImageAspectFlagBits::eMemoryPlane0EXT : return "MemoryPlane0EXT"; - case ImageAspectFlagBits::eMemoryPlane1EXT : return "MemoryPlane1EXT"; - case ImageAspectFlagBits::eMemoryPlane2EXT : return "MemoryPlane2EXT"; - case ImageAspectFlagBits::eMemoryPlane3EXT : return "MemoryPlane3EXT"; + case ImageAspectFlagBits::eColor: return "Color"; + case ImageAspectFlagBits::eDepth: return "Depth"; + case ImageAspectFlagBits::eStencil: return "Stencil"; + case ImageAspectFlagBits::eMetadata: return "Metadata"; + case ImageAspectFlagBits::ePlane0: return "Plane0"; + case ImageAspectFlagBits::ePlane1: return "Plane1"; + case ImageAspectFlagBits::ePlane2: return "Plane2"; + case ImageAspectFlagBits::eMemoryPlane0EXT: return "MemoryPlane0EXT"; + case ImageAspectFlagBits::eMemoryPlane1EXT: return "MemoryPlane1EXT"; + case ImageAspectFlagBits::eMemoryPlane2EXT: return "MemoryPlane2EXT"; + case ImageAspectFlagBits::eMemoryPlane3EXT: return "MemoryPlane3EXT"; default: return "invalid"; } } enum class ImageCreateFlagBits : VkImageCreateFlags { - eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT, - eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, - eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, - eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, - eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, - eAlias = VK_IMAGE_CREATE_ALIAS_BIT, - eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, - e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, - eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, - eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, - eProtected = VK_IMAGE_CREATE_PROTECTED_BIT, - eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT, - eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, + eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, + eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, + eCubeCompatible = 
VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, + eAlias = VK_IMAGE_CREATE_ALIAS_BIT, + eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + eProtected = VK_IMAGE_CREATE_PROTECTED_BIT, + eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT, + eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT, - eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, - eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, - e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR, - eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, - eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, - eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR, - eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR + eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, + eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, + e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR, + eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, + eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, + eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR, + eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR }; VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value ) { switch ( value ) { - case ImageCreateFlagBits::eSparseBinding : return "SparseBinding"; - case ImageCreateFlagBits::eSparseResidency : return "SparseResidency"; - case ImageCreateFlagBits::eSparseAliased : return "SparseAliased"; - case ImageCreateFlagBits::eMutableFormat : return "MutableFormat"; - case ImageCreateFlagBits::eCubeCompatible : return "CubeCompatible"; - case ImageCreateFlagBits::eAlias : return "Alias"; - case ImageCreateFlagBits::eSplitInstanceBindRegions : return "SplitInstanceBindRegions"; - case ImageCreateFlagBits::e2DArrayCompatible : return "2DArrayCompatible"; - case ImageCreateFlagBits::eBlockTexelViewCompatible : return "BlockTexelViewCompatible"; - case ImageCreateFlagBits::eExtendedUsage : return "ExtendedUsage"; - case ImageCreateFlagBits::eProtected : return "Protected"; - case ImageCreateFlagBits::eDisjoint : return "Disjoint"; - case ImageCreateFlagBits::eCornerSampledNV : return "CornerSampledNV"; - case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT : return "SampleLocationsCompatibleDepthEXT"; - case ImageCreateFlagBits::eSubsampledEXT : return "SubsampledEXT"; + case ImageCreateFlagBits::eSparseBinding: return "SparseBinding"; + case ImageCreateFlagBits::eSparseResidency: return "SparseResidency"; + case ImageCreateFlagBits::eSparseAliased: return "SparseAliased"; + case ImageCreateFlagBits::eMutableFormat: return "MutableFormat"; + case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible"; + case ImageCreateFlagBits::eAlias: return "Alias"; + case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible"; + case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible"; + case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage"; + case ImageCreateFlagBits::eProtected: return "Protected"; + case ImageCreateFlagBits::eDisjoint: return "Disjoint"; + case 
ImageCreateFlagBits::eCornerSampledNV: return "CornerSampledNV"; + case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT"; + case ImageCreateFlagBits::eSubsampledEXT: return "SubsampledEXT"; default: return "invalid"; } } enum class ImageLayout { - eUndefined = VK_IMAGE_LAYOUT_UNDEFINED, - eGeneral = VK_IMAGE_LAYOUT_GENERAL, - eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, - eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, - eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, - eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, - eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, - ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED, - eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, - eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, - eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, - eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, - eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, - eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, - ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, - eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, - eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, - eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, + eUndefined = VK_IMAGE_LAYOUT_UNDEFINED, + eGeneral = VK_IMAGE_LAYOUT_GENERAL, + eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, + eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, + eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, + eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED, + eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, + eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, + eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, + eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, + ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, + eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, + eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, + eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR, - eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, - eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, - eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, - eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR + eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, + eDepthReadOnlyOptimalKHR = 
VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, + eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR }; VULKAN_HPP_INLINE std::string to_string( ImageLayout value ) { switch ( value ) { - case ImageLayout::eUndefined : return "Undefined"; - case ImageLayout::eGeneral : return "General"; - case ImageLayout::eColorAttachmentOptimal : return "ColorAttachmentOptimal"; - case ImageLayout::eDepthStencilAttachmentOptimal : return "DepthStencilAttachmentOptimal"; - case ImageLayout::eDepthStencilReadOnlyOptimal : return "DepthStencilReadOnlyOptimal"; - case ImageLayout::eShaderReadOnlyOptimal : return "ShaderReadOnlyOptimal"; - case ImageLayout::eTransferSrcOptimal : return "TransferSrcOptimal"; - case ImageLayout::eTransferDstOptimal : return "TransferDstOptimal"; - case ImageLayout::ePreinitialized : return "Preinitialized"; - case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal : return "DepthReadOnlyStencilAttachmentOptimal"; - case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal : return "DepthAttachmentStencilReadOnlyOptimal"; - case ImageLayout::eDepthAttachmentOptimal : return "DepthAttachmentOptimal"; - case ImageLayout::eDepthReadOnlyOptimal : return "DepthReadOnlyOptimal"; - case ImageLayout::eStencilAttachmentOptimal : return "StencilAttachmentOptimal"; - case ImageLayout::eStencilReadOnlyOptimal : return "StencilReadOnlyOptimal"; - case ImageLayout::ePresentSrcKHR : return "PresentSrcKHR"; - case ImageLayout::eSharedPresentKHR : return "SharedPresentKHR"; - case ImageLayout::eShadingRateOptimalNV : return "ShadingRateOptimalNV"; - case ImageLayout::eFragmentDensityMapOptimalEXT : return "FragmentDensityMapOptimalEXT"; + case ImageLayout::eUndefined: return "Undefined"; + case ImageLayout::eGeneral: return "General"; + case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal"; + case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal"; + case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal"; + case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal"; + case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal"; + case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal"; + case ImageLayout::ePreinitialized: return "Preinitialized"; + case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal"; + case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal"; + case ImageLayout::eDepthAttachmentOptimal: return "DepthAttachmentOptimal"; + case ImageLayout::eDepthReadOnlyOptimal: return "DepthReadOnlyOptimal"; + case ImageLayout::eStencilAttachmentOptimal: return "StencilAttachmentOptimal"; + case ImageLayout::eStencilReadOnlyOptimal: return "StencilReadOnlyOptimal"; + case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR"; + case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR"; + case ImageLayout::eShadingRateOptimalNV: return "ShadingRateOptimalNV"; + case ImageLayout::eFragmentDensityMapOptimalEXT: return "FragmentDensityMapOptimalEXT"; default: return "invalid"; } } enum class ImageTiling { - eOptimal = VK_IMAGE_TILING_OPTIMAL, - eLinear = VK_IMAGE_TILING_LINEAR, + eOptimal = VK_IMAGE_TILING_OPTIMAL, + eLinear = VK_IMAGE_TILING_LINEAR, eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT }; @@ -5709,9 +7019,9 @@ namespace VULKAN_HPP_NAMESPACE { switch ( 
value ) { - case ImageTiling::eOptimal : return "Optimal"; - case ImageTiling::eLinear : return "Linear"; - case ImageTiling::eDrmFormatModifierEXT : return "DrmFormatModifierEXT"; + case ImageTiling::eOptimal: return "Optimal"; + case ImageTiling::eLinear: return "Linear"; + case ImageTiling::eDrmFormatModifierEXT: return "DrmFormatModifierEXT"; default: return "invalid"; } } @@ -5727,41 +7037,41 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ImageType::e1D : return "1D"; - case ImageType::e2D : return "2D"; - case ImageType::e3D : return "3D"; + case ImageType::e1D: return "1D"; + case ImageType::e2D: return "2D"; + case ImageType::e3D: return "3D"; default: return "invalid"; } } enum class ImageUsageFlagBits : VkImageUsageFlags { - eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, - eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, - eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, - eStorage = VK_IMAGE_USAGE_STORAGE_BIT, - eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, + eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, + eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, + eStorage = VK_IMAGE_USAGE_STORAGE_BIT, + eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, - eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, - eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, - eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, - eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT + eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, + eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, + eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, + eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT }; VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value ) { switch ( value ) { - case ImageUsageFlagBits::eTransferSrc : return "TransferSrc"; - case ImageUsageFlagBits::eTransferDst : return "TransferDst"; - case ImageUsageFlagBits::eSampled : return "Sampled"; - case ImageUsageFlagBits::eStorage : return "Storage"; - case ImageUsageFlagBits::eColorAttachment : return "ColorAttachment"; - case ImageUsageFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment"; - case ImageUsageFlagBits::eTransientAttachment : return "TransientAttachment"; - case ImageUsageFlagBits::eInputAttachment : return "InputAttachment"; - case ImageUsageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV"; - case ImageUsageFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT"; + case ImageUsageFlagBits::eTransferSrc: return "TransferSrc"; + case ImageUsageFlagBits::eTransferDst: return "TransferDst"; + case ImageUsageFlagBits::eSampled: return "Sampled"; + case ImageUsageFlagBits::eStorage: return "Storage"; + case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment"; + case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; + case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment"; + case ImageUsageFlagBits::eInputAttachment: return "InputAttachment"; + case ImageUsageFlagBits::eShadingRateImageNV: return "ShadingRateImageNV"; + case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; default: return "invalid"; } } @@ -5775,19 +7085,19 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT : return "FragmentDensityMapDynamicEXT"; + case 
ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT: return "FragmentDensityMapDynamicEXT"; default: return "invalid"; } } enum class ImageViewType { - e1D = VK_IMAGE_VIEW_TYPE_1D, - e2D = VK_IMAGE_VIEW_TYPE_2D, - e3D = VK_IMAGE_VIEW_TYPE_3D, - eCube = VK_IMAGE_VIEW_TYPE_CUBE, - e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY, - e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY, + e1D = VK_IMAGE_VIEW_TYPE_1D, + e2D = VK_IMAGE_VIEW_TYPE_2D, + e3D = VK_IMAGE_VIEW_TYPE_3D, + eCube = VK_IMAGE_VIEW_TYPE_CUBE, + e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY, + e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY, eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY }; @@ -5795,34 +7105,34 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ImageViewType::e1D : return "1D"; - case ImageViewType::e2D : return "2D"; - case ImageViewType::e3D : return "3D"; - case ImageViewType::eCube : return "Cube"; - case ImageViewType::e1DArray : return "1DArray"; - case ImageViewType::e2DArray : return "2DArray"; - case ImageViewType::eCubeArray : return "CubeArray"; + case ImageViewType::e1D: return "1D"; + case ImageViewType::e2D: return "2D"; + case ImageViewType::e3D: return "3D"; + case ImageViewType::eCube: return "Cube"; + case ImageViewType::e1DArray: return "1DArray"; + case ImageViewType::e2DArray: return "2DArray"; + case ImageViewType::eCubeArray: return "CubeArray"; default: return "invalid"; } } enum class IndexType { - eUint16 = VK_INDEX_TYPE_UINT16, - eUint32 = VK_INDEX_TYPE_UINT32, - eNoneKHR = VK_INDEX_TYPE_NONE_KHR, + eUint16 = VK_INDEX_TYPE_UINT16, + eUint32 = VK_INDEX_TYPE_UINT32, + eNoneKHR = VK_INDEX_TYPE_NONE_KHR, eUint8EXT = VK_INDEX_TYPE_UINT8_EXT, - eNoneNV = VK_INDEX_TYPE_NONE_NV + eNoneNV = VK_INDEX_TYPE_NONE_NV }; VULKAN_HPP_INLINE std::string to_string( IndexType value ) { switch ( value ) { - case IndexType::eUint16 : return "Uint16"; - case IndexType::eUint32 : return "Uint32"; - case IndexType::eNoneKHR : return "NoneKHR"; - case IndexType::eUint8EXT : return "Uint8EXT"; + case IndexType::eUint16: return "Uint16"; + case IndexType::eUint32: return "Uint32"; + case IndexType::eNoneKHR: return "NoneKHR"; + case IndexType::eUint8EXT: return "Uint8EXT"; default: return "invalid"; } } @@ -5830,7 +7140,7 @@ namespace VULKAN_HPP_NAMESPACE enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV { eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV, - eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV, + eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV, eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV }; @@ -5838,37 +7148,37 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess : return "ExplicitPreprocess"; - case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences : return "IndexedSequences"; - case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences : return "UnorderedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess: return "ExplicitPreprocess"; + case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences: return "IndexedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences: return "UnorderedSequences"; default: return "invalid"; } } enum class IndirectCommandsTokenTypeNV { - eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV, - eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV, - eIndexBuffer = 
VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV, + eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV, + eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV, eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV, ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV, - eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, - eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, - eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, + eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV }; VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value ) { switch ( value ) { - case IndirectCommandsTokenTypeNV::eShaderGroup : return "ShaderGroup"; - case IndirectCommandsTokenTypeNV::eStateFlags : return "StateFlags"; - case IndirectCommandsTokenTypeNV::eIndexBuffer : return "IndexBuffer"; - case IndirectCommandsTokenTypeNV::eVertexBuffer : return "VertexBuffer"; - case IndirectCommandsTokenTypeNV::ePushConstant : return "PushConstant"; - case IndirectCommandsTokenTypeNV::eDrawIndexed : return "DrawIndexed"; - case IndirectCommandsTokenTypeNV::eDraw : return "Draw"; - case IndirectCommandsTokenTypeNV::eDrawTasks : return "DrawTasks"; + case IndirectCommandsTokenTypeNV::eShaderGroup: return "ShaderGroup"; + case IndirectCommandsTokenTypeNV::eStateFlags: return "StateFlags"; + case IndirectCommandsTokenTypeNV::eIndexBuffer: return "IndexBuffer"; + case IndirectCommandsTokenTypeNV::eVertexBuffer: return "VertexBuffer"; + case IndirectCommandsTokenTypeNV::ePushConstant: return "PushConstant"; + case IndirectCommandsTokenTypeNV::eDrawIndexed: return "DrawIndexed"; + case IndirectCommandsTokenTypeNV::eDraw: return "Draw"; + case IndirectCommandsTokenTypeNV::eDrawTasks: return "DrawTasks"; default: return "invalid"; } } @@ -5882,13 +7192,14 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case IndirectStateFlagBitsNV::eFlagFrontface : return "FlagFrontface"; + case IndirectStateFlagBitsNV::eFlagFrontface: return "FlagFrontface"; default: return "invalid"; } } enum class InstanceCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits ) { @@ -5904,16 +7215,16 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case InternalAllocationType::eExecutable : return "Executable"; + case InternalAllocationType::eExecutable: return "Executable"; default: return "invalid"; } } enum class LineRasterizationModeEXT { - eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, - eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, - eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, + eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, + eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, + eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT }; @@ -5921,62 +7232,62 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case LineRasterizationModeEXT::eDefault : return "Default"; - case LineRasterizationModeEXT::eRectangular : return "Rectangular"; - case LineRasterizationModeEXT::eBresenham : return "Bresenham"; - case LineRasterizationModeEXT::eRectangularSmooth : return "RectangularSmooth"; + case LineRasterizationModeEXT::eDefault: return "Default"; + case LineRasterizationModeEXT::eRectangular: return "Rectangular"; + case 
LineRasterizationModeEXT::eBresenham: return "Bresenham"; + case LineRasterizationModeEXT::eRectangularSmooth: return "RectangularSmooth"; default: return "invalid"; } } enum class LogicOp { - eClear = VK_LOGIC_OP_CLEAR, - eAnd = VK_LOGIC_OP_AND, - eAndReverse = VK_LOGIC_OP_AND_REVERSE, - eCopy = VK_LOGIC_OP_COPY, - eAndInverted = VK_LOGIC_OP_AND_INVERTED, - eNoOp = VK_LOGIC_OP_NO_OP, - eXor = VK_LOGIC_OP_XOR, - eOr = VK_LOGIC_OP_OR, - eNor = VK_LOGIC_OP_NOR, - eEquivalent = VK_LOGIC_OP_EQUIVALENT, - eInvert = VK_LOGIC_OP_INVERT, - eOrReverse = VK_LOGIC_OP_OR_REVERSE, + eClear = VK_LOGIC_OP_CLEAR, + eAnd = VK_LOGIC_OP_AND, + eAndReverse = VK_LOGIC_OP_AND_REVERSE, + eCopy = VK_LOGIC_OP_COPY, + eAndInverted = VK_LOGIC_OP_AND_INVERTED, + eNoOp = VK_LOGIC_OP_NO_OP, + eXor = VK_LOGIC_OP_XOR, + eOr = VK_LOGIC_OP_OR, + eNor = VK_LOGIC_OP_NOR, + eEquivalent = VK_LOGIC_OP_EQUIVALENT, + eInvert = VK_LOGIC_OP_INVERT, + eOrReverse = VK_LOGIC_OP_OR_REVERSE, eCopyInverted = VK_LOGIC_OP_COPY_INVERTED, - eOrInverted = VK_LOGIC_OP_OR_INVERTED, - eNand = VK_LOGIC_OP_NAND, - eSet = VK_LOGIC_OP_SET + eOrInverted = VK_LOGIC_OP_OR_INVERTED, + eNand = VK_LOGIC_OP_NAND, + eSet = VK_LOGIC_OP_SET }; VULKAN_HPP_INLINE std::string to_string( LogicOp value ) { switch ( value ) { - case LogicOp::eClear : return "Clear"; - case LogicOp::eAnd : return "And"; - case LogicOp::eAndReverse : return "AndReverse"; - case LogicOp::eCopy : return "Copy"; - case LogicOp::eAndInverted : return "AndInverted"; - case LogicOp::eNoOp : return "NoOp"; - case LogicOp::eXor : return "Xor"; - case LogicOp::eOr : return "Or"; - case LogicOp::eNor : return "Nor"; - case LogicOp::eEquivalent : return "Equivalent"; - case LogicOp::eInvert : return "Invert"; - case LogicOp::eOrReverse : return "OrReverse"; - case LogicOp::eCopyInverted : return "CopyInverted"; - case LogicOp::eOrInverted : return "OrInverted"; - case LogicOp::eNand : return "Nand"; - case LogicOp::eSet : return "Set"; + case LogicOp::eClear: return "Clear"; + case LogicOp::eAnd: return "And"; + case LogicOp::eAndReverse: return "AndReverse"; + case LogicOp::eCopy: return "Copy"; + case LogicOp::eAndInverted: return "AndInverted"; + case LogicOp::eNoOp: return "NoOp"; + case LogicOp::eXor: return "Xor"; + case LogicOp::eOr: return "Or"; + case LogicOp::eNor: return "Nor"; + case LogicOp::eEquivalent: return "Equivalent"; + case LogicOp::eInvert: return "Invert"; + case LogicOp::eOrReverse: return "OrReverse"; + case LogicOp::eCopyInverted: return "CopyInverted"; + case LogicOp::eOrInverted: return "OrInverted"; + case LogicOp::eNand: return "Nand"; + case LogicOp::eSet: return "Set"; default: return "invalid"; } } enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags { - eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, - eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, + eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT }; using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits; @@ -5985,17 +7296,17 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case MemoryAllocateFlagBits::eDeviceMask : return "DeviceMask"; - case MemoryAllocateFlagBits::eDeviceAddress : return "DeviceAddress"; - case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay"; + case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask"; + case MemoryAllocateFlagBits::eDeviceAddress: return "DeviceAddress"; + case 
MemoryAllocateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; default: return "invalid"; } } enum class MemoryHeapFlagBits : VkMemoryHeapFlags { - eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT, - eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT, + eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR }; @@ -6003,16 +7314,16 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case MemoryHeapFlagBits::eDeviceLocal : return "DeviceLocal"; - case MemoryHeapFlagBits::eMultiInstance : return "MultiInstance"; + case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal"; + case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance"; default: return "invalid"; } } enum class MemoryOverallocationBehaviorAMD { - eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, - eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD, + eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, + eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD, eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD }; @@ -6020,21 +7331,21 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case MemoryOverallocationBehaviorAMD::eDefault : return "Default"; - case MemoryOverallocationBehaviorAMD::eAllowed : return "Allowed"; - case MemoryOverallocationBehaviorAMD::eDisallowed : return "Disallowed"; + case MemoryOverallocationBehaviorAMD::eDefault: return "Default"; + case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed"; + case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed"; default: return "invalid"; } } enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags { - eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, - eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, - eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, - eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT, - eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT, - eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT, + eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, + eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, + eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, + eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT, + eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT, + eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT, eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD, eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD }; @@ -6043,115 +7354,115 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case MemoryPropertyFlagBits::eDeviceLocal : return "DeviceLocal"; - case MemoryPropertyFlagBits::eHostVisible : return "HostVisible"; - case MemoryPropertyFlagBits::eHostCoherent : return "HostCoherent"; - case MemoryPropertyFlagBits::eHostCached : return "HostCached"; - case MemoryPropertyFlagBits::eLazilyAllocated : return "LazilyAllocated"; - case MemoryPropertyFlagBits::eProtected : return "Protected"; - case MemoryPropertyFlagBits::eDeviceCoherentAMD : return "DeviceCoherentAMD"; - case MemoryPropertyFlagBits::eDeviceUncachedAMD : return "DeviceUncachedAMD"; + case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal"; + case MemoryPropertyFlagBits::eHostVisible: return "HostVisible"; + case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent"; + case MemoryPropertyFlagBits::eHostCached: return "HostCached"; + case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated"; + case 
MemoryPropertyFlagBits::eProtected: return "Protected"; + case MemoryPropertyFlagBits::eDeviceCoherentAMD: return "DeviceCoherentAMD"; + case MemoryPropertyFlagBits::eDeviceUncachedAMD: return "DeviceUncachedAMD"; default: return "invalid"; } } enum class ObjectType { - eUnknown = VK_OBJECT_TYPE_UNKNOWN, - eInstance = VK_OBJECT_TYPE_INSTANCE, - ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE, - eDevice = VK_OBJECT_TYPE_DEVICE, - eQueue = VK_OBJECT_TYPE_QUEUE, - eSemaphore = VK_OBJECT_TYPE_SEMAPHORE, - eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER, - eFence = VK_OBJECT_TYPE_FENCE, - eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY, - eBuffer = VK_OBJECT_TYPE_BUFFER, - eImage = VK_OBJECT_TYPE_IMAGE, - eEvent = VK_OBJECT_TYPE_EVENT, - eQueryPool = VK_OBJECT_TYPE_QUERY_POOL, - eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW, - eImageView = VK_OBJECT_TYPE_IMAGE_VIEW, - eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE, - ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE, - ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT, - eRenderPass = VK_OBJECT_TYPE_RENDER_PASS, - ePipeline = VK_OBJECT_TYPE_PIPELINE, - eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, - eSampler = VK_OBJECT_TYPE_SAMPLER, - eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL, - eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET, - eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER, - eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL, - eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, - eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, - eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR, - eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR, - eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, - eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, - eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, - eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, - eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, - eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, + eUnknown = VK_OBJECT_TYPE_UNKNOWN, + eInstance = VK_OBJECT_TYPE_INSTANCE, + ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE, + eDevice = VK_OBJECT_TYPE_DEVICE, + eQueue = VK_OBJECT_TYPE_QUEUE, + eSemaphore = VK_OBJECT_TYPE_SEMAPHORE, + eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER, + eFence = VK_OBJECT_TYPE_FENCE, + eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY, + eBuffer = VK_OBJECT_TYPE_BUFFER, + eImage = VK_OBJECT_TYPE_IMAGE, + eEvent = VK_OBJECT_TYPE_EVENT, + eQueryPool = VK_OBJECT_TYPE_QUERY_POOL, + eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW, + eImageView = VK_OBJECT_TYPE_IMAGE_VIEW, + eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE, + ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE, + ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT, + eRenderPass = VK_OBJECT_TYPE_RENDER_PASS, + ePipeline = VK_OBJECT_TYPE_PIPELINE, + eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, + eSampler = VK_OBJECT_TYPE_SAMPLER, + eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL, + eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET, + eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER, + eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL, + eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR, + eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR, + eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, + eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, + eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, + eDebugUtilsMessengerEXT = 
VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, + eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, + eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, - eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR, - eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV, - eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, - eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR, - eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV + eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR, + eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV, + eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, + eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR, + eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV }; VULKAN_HPP_INLINE std::string to_string( ObjectType value ) { switch ( value ) { - case ObjectType::eUnknown : return "Unknown"; - case ObjectType::eInstance : return "Instance"; - case ObjectType::ePhysicalDevice : return "PhysicalDevice"; - case ObjectType::eDevice : return "Device"; - case ObjectType::eQueue : return "Queue"; - case ObjectType::eSemaphore : return "Semaphore"; - case ObjectType::eCommandBuffer : return "CommandBuffer"; - case ObjectType::eFence : return "Fence"; - case ObjectType::eDeviceMemory : return "DeviceMemory"; - case ObjectType::eBuffer : return "Buffer"; - case ObjectType::eImage : return "Image"; - case ObjectType::eEvent : return "Event"; - case ObjectType::eQueryPool : return "QueryPool"; - case ObjectType::eBufferView : return "BufferView"; - case ObjectType::eImageView : return "ImageView"; - case ObjectType::eShaderModule : return "ShaderModule"; - case ObjectType::ePipelineCache : return "PipelineCache"; - case ObjectType::ePipelineLayout : return "PipelineLayout"; - case ObjectType::eRenderPass : return "RenderPass"; - case ObjectType::ePipeline : return "Pipeline"; - case ObjectType::eDescriptorSetLayout : return "DescriptorSetLayout"; - case ObjectType::eSampler : return "Sampler"; - case ObjectType::eDescriptorPool : return "DescriptorPool"; - case ObjectType::eDescriptorSet : return "DescriptorSet"; - case ObjectType::eFramebuffer : return "Framebuffer"; - case ObjectType::eCommandPool : return "CommandPool"; - case ObjectType::eSamplerYcbcrConversion : return "SamplerYcbcrConversion"; - case ObjectType::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate"; - case ObjectType::eSurfaceKHR : return "SurfaceKHR"; - case ObjectType::eSwapchainKHR : return "SwapchainKHR"; - case ObjectType::eDisplayKHR : return "DisplayKHR"; - case ObjectType::eDisplayModeKHR : return "DisplayModeKHR"; - case ObjectType::eDebugReportCallbackEXT : return "DebugReportCallbackEXT"; - case ObjectType::eDebugUtilsMessengerEXT : return "DebugUtilsMessengerEXT"; - case ObjectType::eAccelerationStructureKHR : return "AccelerationStructureKHR"; - case ObjectType::eValidationCacheEXT : return "ValidationCacheEXT"; - case ObjectType::ePerformanceConfigurationINTEL : return "PerformanceConfigurationINTEL"; - case ObjectType::eDeferredOperationKHR : return "DeferredOperationKHR"; - case ObjectType::eIndirectCommandsLayoutNV : return "IndirectCommandsLayoutNV"; + case ObjectType::eUnknown: return "Unknown"; + case ObjectType::eInstance: return "Instance"; + case ObjectType::ePhysicalDevice: return 
"PhysicalDevice"; + case ObjectType::eDevice: return "Device"; + case ObjectType::eQueue: return "Queue"; + case ObjectType::eSemaphore: return "Semaphore"; + case ObjectType::eCommandBuffer: return "CommandBuffer"; + case ObjectType::eFence: return "Fence"; + case ObjectType::eDeviceMemory: return "DeviceMemory"; + case ObjectType::eBuffer: return "Buffer"; + case ObjectType::eImage: return "Image"; + case ObjectType::eEvent: return "Event"; + case ObjectType::eQueryPool: return "QueryPool"; + case ObjectType::eBufferView: return "BufferView"; + case ObjectType::eImageView: return "ImageView"; + case ObjectType::eShaderModule: return "ShaderModule"; + case ObjectType::ePipelineCache: return "PipelineCache"; + case ObjectType::ePipelineLayout: return "PipelineLayout"; + case ObjectType::eRenderPass: return "RenderPass"; + case ObjectType::ePipeline: return "Pipeline"; + case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout"; + case ObjectType::eSampler: return "Sampler"; + case ObjectType::eDescriptorPool: return "DescriptorPool"; + case ObjectType::eDescriptorSet: return "DescriptorSet"; + case ObjectType::eFramebuffer: return "Framebuffer"; + case ObjectType::eCommandPool: return "CommandPool"; + case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case ObjectType::eSurfaceKHR: return "SurfaceKHR"; + case ObjectType::eSwapchainKHR: return "SwapchainKHR"; + case ObjectType::eDisplayKHR: return "DisplayKHR"; + case ObjectType::eDisplayModeKHR: return "DisplayModeKHR"; + case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; + case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT"; + case ObjectType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT"; + case ObjectType::ePerformanceConfigurationINTEL: return "PerformanceConfigurationINTEL"; + case ObjectType::eDeferredOperationKHR: return "DeferredOperationKHR"; + case ObjectType::eIndirectCommandsLayoutNV: return "IndirectCommandsLayoutNV"; default: return "invalid"; } } enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags { - eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, - eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT }; @@ -6161,24 +7472,26 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case PeerMemoryFeatureFlagBits::eCopySrc : return "CopySrc"; - case PeerMemoryFeatureFlagBits::eCopyDst : return "CopyDst"; - case PeerMemoryFeatureFlagBits::eGenericSrc : return "GenericSrc"; - case PeerMemoryFeatureFlagBits::eGenericDst : return "GenericDst"; + case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc"; + case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst"; + case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc"; + case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst"; default: return "invalid"; } } enum class PerformanceConfigurationTypeINTEL { - eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL + eCommandQueueMetricsDiscoveryActivated = + VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL }; VULKAN_HPP_INLINE std::string to_string( 
PerformanceConfigurationTypeINTEL value ) { switch ( value ) { - case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated : return "CommandQueueMetricsDiscoveryActivated"; + case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated: + return "CommandQueueMetricsDiscoveryActivated"; default: return "invalid"; } } @@ -6193,39 +7506,39 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting : return "PerformanceImpacting"; - case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted : return "ConcurrentlyImpacted"; + case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting: return "PerformanceImpacting"; + case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted: return "ConcurrentlyImpacted"; default: return "invalid"; } } enum class PerformanceCounterScopeKHR { - eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, - eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, - eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, + eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, - eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR, - eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR + eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR, + eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR }; VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value ) { switch ( value ) { - case PerformanceCounterScopeKHR::eCommandBuffer : return "CommandBuffer"; - case PerformanceCounterScopeKHR::eRenderPass : return "RenderPass"; - case PerformanceCounterScopeKHR::eCommand : return "Command"; + case PerformanceCounterScopeKHR::eCommandBuffer: return "CommandBuffer"; + case PerformanceCounterScopeKHR::eRenderPass: return "RenderPass"; + case PerformanceCounterScopeKHR::eCommand: return "Command"; default: return "invalid"; } } enum class PerformanceCounterStorageKHR { - eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR, - eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR, - eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR, - eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR, + eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR, + eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR, + eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR, + eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR, eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR, eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR }; @@ -6234,53 +7547,53 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case PerformanceCounterStorageKHR::eInt32 : return "Int32"; - case PerformanceCounterStorageKHR::eInt64 : return "Int64"; - case PerformanceCounterStorageKHR::eUint32 : return "Uint32"; - case PerformanceCounterStorageKHR::eUint64 : return "Uint64"; - case PerformanceCounterStorageKHR::eFloat32 : return "Float32"; - case PerformanceCounterStorageKHR::eFloat64 : return "Float64"; + case PerformanceCounterStorageKHR::eInt32: return "Int32"; + case PerformanceCounterStorageKHR::eInt64: return "Int64"; + case PerformanceCounterStorageKHR::eUint32: return "Uint32"; + case PerformanceCounterStorageKHR::eUint64: return "Uint64"; + case PerformanceCounterStorageKHR::eFloat32: return "Float32"; + case PerformanceCounterStorageKHR::eFloat64: return "Float64"; 
default: return "invalid"; } } enum class PerformanceCounterUnitKHR { - eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR, - ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR, - eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR, - eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR, + eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR, + ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR, + eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR, + eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR, eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR, - eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR, - eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR, - eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR, - eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR, - eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR, - eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR + eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR, + eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR, + eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR, + eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR, + eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR, + eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR }; VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value ) { switch ( value ) { - case PerformanceCounterUnitKHR::eGeneric : return "Generic"; - case PerformanceCounterUnitKHR::ePercentage : return "Percentage"; - case PerformanceCounterUnitKHR::eNanoseconds : return "Nanoseconds"; - case PerformanceCounterUnitKHR::eBytes : return "Bytes"; - case PerformanceCounterUnitKHR::eBytesPerSecond : return "BytesPerSecond"; - case PerformanceCounterUnitKHR::eKelvin : return "Kelvin"; - case PerformanceCounterUnitKHR::eWatts : return "Watts"; - case PerformanceCounterUnitKHR::eVolts : return "Volts"; - case PerformanceCounterUnitKHR::eAmps : return "Amps"; - case PerformanceCounterUnitKHR::eHertz : return "Hertz"; - case PerformanceCounterUnitKHR::eCycles : return "Cycles"; + case PerformanceCounterUnitKHR::eGeneric: return "Generic"; + case PerformanceCounterUnitKHR::ePercentage: return "Percentage"; + case PerformanceCounterUnitKHR::eNanoseconds: return "Nanoseconds"; + case PerformanceCounterUnitKHR::eBytes: return "Bytes"; + case PerformanceCounterUnitKHR::eBytesPerSecond: return "BytesPerSecond"; + case PerformanceCounterUnitKHR::eKelvin: return "Kelvin"; + case PerformanceCounterUnitKHR::eWatts: return "Watts"; + case PerformanceCounterUnitKHR::eVolts: return "Volts"; + case PerformanceCounterUnitKHR::eAmps: return "Amps"; + case PerformanceCounterUnitKHR::eHertz: return "Hertz"; + case PerformanceCounterUnitKHR::eCycles: return "Cycles"; default: return "invalid"; } } enum class PerformanceOverrideTypeINTEL { - eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, + eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL }; @@ -6288,15 +7601,15 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case PerformanceOverrideTypeINTEL::eNullHardware : return "NullHardware"; - case PerformanceOverrideTypeINTEL::eFlushGpuCaches : return "FlushGpuCaches"; + case PerformanceOverrideTypeINTEL::eNullHardware: return "NullHardware"; + case PerformanceOverrideTypeINTEL::eFlushGpuCaches: return "FlushGpuCaches"; default: return "invalid"; } } enum class PerformanceParameterTypeINTEL { - eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL, + eHwCountersSupported = 
VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL, eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL }; @@ -6304,8 +7617,8 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case PerformanceParameterTypeINTEL::eHwCountersSupported : return "HwCountersSupported"; - case PerformanceParameterTypeINTEL::eStreamMarkerValidBits : return "StreamMarkerValidBits"; + case PerformanceParameterTypeINTEL::eHwCountersSupported: return "HwCountersSupported"; + case PerformanceParameterTypeINTEL::eStreamMarkerValidBits: return "StreamMarkerValidBits"; default: return "invalid"; } } @@ -6314,8 +7627,8 @@ namespace VULKAN_HPP_NAMESPACE { eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL, eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL, - eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL, - eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL, + eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL, + eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL, eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL }; @@ -6323,52 +7636,52 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case PerformanceValueTypeINTEL::eUint32 : return "Uint32"; - case PerformanceValueTypeINTEL::eUint64 : return "Uint64"; - case PerformanceValueTypeINTEL::eFloat : return "Float"; - case PerformanceValueTypeINTEL::eBool : return "Bool"; - case PerformanceValueTypeINTEL::eString : return "String"; + case PerformanceValueTypeINTEL::eUint32: return "Uint32"; + case PerformanceValueTypeINTEL::eUint64: return "Uint64"; + case PerformanceValueTypeINTEL::eFloat: return "Float"; + case PerformanceValueTypeINTEL::eBool: return "Bool"; + case PerformanceValueTypeINTEL::eString: return "String"; default: return "invalid"; } } enum class PhysicalDeviceType { - eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER, + eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER, eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU, - eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU, - eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU, - eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU + eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU, + eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU, + eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU }; VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value ) { switch ( value ) { - case PhysicalDeviceType::eOther : return "Other"; - case PhysicalDeviceType::eIntegratedGpu : return "IntegratedGpu"; - case PhysicalDeviceType::eDiscreteGpu : return "DiscreteGpu"; - case PhysicalDeviceType::eVirtualGpu : return "VirtualGpu"; - case PhysicalDeviceType::eCpu : return "Cpu"; + case PhysicalDeviceType::eOther: return "Other"; + case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu"; + case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu"; + case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu"; + case PhysicalDeviceType::eCpu: return "Cpu"; default: return "invalid"; } } enum class PipelineBindPoint { - eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, - eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, + eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, + eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, - eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV + eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV }; VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value ) { switch ( value ) { - case PipelineBindPoint::eGraphics : return "Graphics"; - case PipelineBindPoint::eCompute : return "Compute"; - case PipelineBindPoint::eRayTracingKHR : return "RayTracingKHR"; + 
case PipelineBindPoint::eGraphics: return "Graphics"; + case PipelineBindPoint::eCompute: return "Compute"; + case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR"; default: return "invalid"; } } @@ -6382,7 +7695,7 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case PipelineCacheCreateFlagBits::eExternallySynchronizedEXT : return "ExternallySynchronizedEXT"; + case PipelineCacheCreateFlagBits::eExternallySynchronizedEXT: return "ExternallySynchronizedEXT"; default: return "invalid"; } } @@ -6396,13 +7709,14 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case PipelineCacheHeaderVersion::eOne : return "One"; + case PipelineCacheHeaderVersion::eOne: return "One"; default: return "invalid"; } } enum class PipelineColorBlendStateCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits ) { @@ -6410,7 +7724,8 @@ namespace VULKAN_HPP_NAMESPACE } enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD ) { @@ -6419,74 +7734,76 @@ namespace VULKAN_HPP_NAMESPACE enum class PipelineCreateFlagBits : VkPipelineCreateFlags { - eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT, - eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, - eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, - eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, - eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, - eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, - eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, - eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, + eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT, + eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, + eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, - eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, - eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, - eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, - eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, - eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, - eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, - eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, - eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, - eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, - eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, - eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR + eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, + 
eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, + eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, + eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, + eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, + eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, + eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, + eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, + eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, + eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR }; VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value ) { switch ( value ) { - case PipelineCreateFlagBits::eDisableOptimization : return "DisableOptimization"; - case PipelineCreateFlagBits::eAllowDerivatives : return "AllowDerivatives"; - case PipelineCreateFlagBits::eDerivative : return "Derivative"; - case PipelineCreateFlagBits::eViewIndexFromDeviceIndex : return "ViewIndexFromDeviceIndex"; - case PipelineCreateFlagBits::eDispatchBase : return "DispatchBase"; - case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR : return "RayTracingNoNullAnyHitShadersKHR"; - case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR : return "RayTracingNoNullClosestHitShadersKHR"; - case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR : return "RayTracingNoNullMissShadersKHR"; - case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR : return "RayTracingNoNullIntersectionShadersKHR"; - case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR : return "RayTracingSkipTrianglesKHR"; - case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR : return "RayTracingSkipAabbsKHR"; - case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV"; - case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR"; - case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR"; - case PipelineCreateFlagBits::eIndirectBindableNV : return "IndirectBindableNV"; - case PipelineCreateFlagBits::eLibraryKHR : return "LibraryKHR"; - case PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT : return "FailOnPipelineCompileRequiredEXT"; - case PipelineCreateFlagBits::eEarlyReturnOnFailureEXT : return "EarlyReturnOnFailureEXT"; + case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives"; + case PipelineCreateFlagBits::eDerivative: return "Derivative"; + case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase"; + case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR: + return "RayTracingNoNullIntersectionShadersKHR"; + case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR: return "RayTracingSkipTrianglesKHR"; + case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR"; + case 
PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits::eCaptureStatisticsKHR: return "CaptureStatisticsKHR"; + case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR"; + case PipelineCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; + case PipelineCreateFlagBits::eLibraryKHR: return "LibraryKHR"; + case PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT: return "FailOnPipelineCompileRequiredEXT"; + case PipelineCreateFlagBits::eEarlyReturnOnFailureEXT: return "EarlyReturnOnFailureEXT"; default: return "invalid"; } } enum class PipelineCreationFeedbackFlagBitsEXT : VkPipelineCreationFeedbackFlagsEXT { - eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT, + eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT, eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT, - eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT + eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT }; VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBitsEXT value ) { switch ( value ) { - case PipelineCreationFeedbackFlagBitsEXT::eValid : return "Valid"; - case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit : return "ApplicationPipelineCacheHit"; - case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration : return "BasePipelineAcceleration"; + case PipelineCreationFeedbackFlagBitsEXT::eValid: return "Valid"; + case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit: return "ApplicationPipelineCacheHit"; + case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration: return "BasePipelineAcceleration"; default: return "invalid"; } } enum class PipelineDepthStencilStateCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits ) { @@ -6494,7 +7811,8 @@ namespace VULKAN_HPP_NAMESPACE } enum class PipelineDynamicStateCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits ) { @@ -6503,9 +7821,9 @@ namespace VULKAN_HPP_NAMESPACE enum class PipelineExecutableStatisticFormatKHR { - eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, - eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR, - eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR, + eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, + eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR, + eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR, eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR }; @@ -6513,16 +7831,17 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case PipelineExecutableStatisticFormatKHR::eBool32 : return "Bool32"; - case PipelineExecutableStatisticFormatKHR::eInt64 : return "Int64"; - case PipelineExecutableStatisticFormatKHR::eUint64 : return "Uint64"; - case PipelineExecutableStatisticFormatKHR::eFloat64 : return "Float64"; + case PipelineExecutableStatisticFormatKHR::eBool32: return "Bool32"; + case PipelineExecutableStatisticFormatKHR::eInt64: return "Int64"; + case PipelineExecutableStatisticFormatKHR::eUint64: return "Uint64"; + case PipelineExecutableStatisticFormatKHR::eFloat64: return "Float64"; default: return "invalid"; } } enum class PipelineInputAssemblyStateCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( 
PipelineInputAssemblyStateCreateFlagBits ) { @@ -6530,7 +7849,8 @@ namespace VULKAN_HPP_NAMESPACE } enum class PipelineLayoutCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits ) { @@ -6538,7 +7858,8 @@ namespace VULKAN_HPP_NAMESPACE } enum class PipelineMultisampleStateCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits ) { @@ -6546,7 +7867,8 @@ namespace VULKAN_HPP_NAMESPACE } enum class PipelineRasterizationStateCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits ) { @@ -6556,87 +7878,88 @@ namespace VULKAN_HPP_NAMESPACE enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags { eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, - eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT + eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT }; VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value ) { switch ( value ) { - case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT : return "AllowVaryingSubgroupSizeEXT"; - case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT : return "RequireFullSubgroupsEXT"; + case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT: return "AllowVaryingSubgroupSizeEXT"; + case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT: return "RequireFullSubgroupsEXT"; default: return "invalid"; } } enum class PipelineStageFlagBits : VkPipelineStageFlags { - eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, - eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, - eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, - eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, - eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, - eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, - eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, - eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, - eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, - eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, - eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, - eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, - eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT, - eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, - eHost = VK_PIPELINE_STAGE_HOST_BIT, - eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, - eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, - eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT, - eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, - eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, + eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, + eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, + eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, + eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, + eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, + eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, + eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, + eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, + 
eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, + eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT, + eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, + eHost = VK_PIPELINE_STAGE_HOST_BIT, + eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, + eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, + eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT, + eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, + eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, - eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, - eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, - eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, - eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT, - eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, - eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, - eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV + eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, + eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, + eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, + eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT, + eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, + eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, + eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV }; VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value ) { switch ( value ) { - case PipelineStageFlagBits::eTopOfPipe : return "TopOfPipe"; - case PipelineStageFlagBits::eDrawIndirect : return "DrawIndirect"; - case PipelineStageFlagBits::eVertexInput : return "VertexInput"; - case PipelineStageFlagBits::eVertexShader : return "VertexShader"; - case PipelineStageFlagBits::eTessellationControlShader : return "TessellationControlShader"; - case PipelineStageFlagBits::eTessellationEvaluationShader : return "TessellationEvaluationShader"; - case PipelineStageFlagBits::eGeometryShader : return "GeometryShader"; - case PipelineStageFlagBits::eFragmentShader : return "FragmentShader"; - case PipelineStageFlagBits::eEarlyFragmentTests : return "EarlyFragmentTests"; - case PipelineStageFlagBits::eLateFragmentTests : return "LateFragmentTests"; - case PipelineStageFlagBits::eColorAttachmentOutput : return "ColorAttachmentOutput"; - case PipelineStageFlagBits::eComputeShader : return "ComputeShader"; - case PipelineStageFlagBits::eTransfer : return "Transfer"; - case PipelineStageFlagBits::eBottomOfPipe : return "BottomOfPipe"; - case PipelineStageFlagBits::eHost : return "Host"; - case PipelineStageFlagBits::eAllGraphics : return "AllGraphics"; - case PipelineStageFlagBits::eAllCommands : return "AllCommands"; - case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT"; - case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT"; - case PipelineStageFlagBits::eRayTracingShaderKHR : return "RayTracingShaderKHR"; - case PipelineStageFlagBits::eAccelerationStructureBuildKHR : return "AccelerationStructureBuildKHR"; - case PipelineStageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV"; - case PipelineStageFlagBits::eTaskShaderNV : return "TaskShaderNV"; - case 
PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV"; - case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT"; - case PipelineStageFlagBits::eCommandPreprocessNV : return "CommandPreprocessNV"; + case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe"; + case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect"; + case PipelineStageFlagBits::eVertexInput: return "VertexInput"; + case PipelineStageFlagBits::eVertexShader: return "VertexShader"; + case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader"; + case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader"; + case PipelineStageFlagBits::eGeometryShader: return "GeometryShader"; + case PipelineStageFlagBits::eFragmentShader: return "FragmentShader"; + case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests"; + case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests"; + case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput"; + case PipelineStageFlagBits::eComputeShader: return "ComputeShader"; + case PipelineStageFlagBits::eTransfer: return "Transfer"; + case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe"; + case PipelineStageFlagBits::eHost: return "Host"; + case PipelineStageFlagBits::eAllGraphics: return "AllGraphics"; + case PipelineStageFlagBits::eAllCommands: return "AllCommands"; + case PipelineStageFlagBits::eTransformFeedbackEXT: return "TransformFeedbackEXT"; + case PipelineStageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case PipelineStageFlagBits::eRayTracingShaderKHR: return "RayTracingShaderKHR"; + case PipelineStageFlagBits::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR"; + case PipelineStageFlagBits::eShadingRateImageNV: return "ShadingRateImageNV"; + case PipelineStageFlagBits::eTaskShaderNV: return "TaskShaderNV"; + case PipelineStageFlagBits::eMeshShaderNV: return "MeshShaderNV"; + case PipelineStageFlagBits::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT"; + case PipelineStageFlagBits::eCommandPreprocessNV: return "CommandPreprocessNV"; default: return "invalid"; } } enum class PipelineTessellationStateCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits ) { @@ -6644,7 +7967,8 @@ namespace VULKAN_HPP_NAMESPACE } enum class PipelineVertexInputStateCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits ) { @@ -6652,7 +7976,8 @@ namespace VULKAN_HPP_NAMESPACE } enum class PipelineViewportStateCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits ) { @@ -6661,7 +7986,7 @@ namespace VULKAN_HPP_NAMESPACE enum class PointClippingBehavior { - eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY }; using PointClippingBehaviorKHR = PointClippingBehavior; @@ -6670,17 +7995,17 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case PointClippingBehavior::eAllClipPlanes : return "AllClipPlanes"; - case PointClippingBehavior::eUserClipPlanesOnly : return "UserClipPlanesOnly"; + case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes"; + case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly"; default: return "invalid"; } } 
enum class PolygonMode { - eFill = VK_POLYGON_MODE_FILL, - eLine = VK_POLYGON_MODE_LINE, - ePoint = VK_POLYGON_MODE_POINT, + eFill = VK_POLYGON_MODE_FILL, + eLine = VK_POLYGON_MODE_LINE, + ePoint = VK_POLYGON_MODE_POINT, eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV }; @@ -6688,21 +8013,21 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case PolygonMode::eFill : return "Fill"; - case PolygonMode::eLine : return "Line"; - case PolygonMode::ePoint : return "Point"; - case PolygonMode::eFillRectangleNV : return "FillRectangleNV"; + case PolygonMode::eFill: return "Fill"; + case PolygonMode::eLine: return "Line"; + case PolygonMode::ePoint: return "Point"; + case PolygonMode::eFillRectangleNV: return "FillRectangleNV"; default: return "invalid"; } } enum class PresentModeKHR { - eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR, - eMailbox = VK_PRESENT_MODE_MAILBOX_KHR, - eFifo = VK_PRESENT_MODE_FIFO_KHR, - eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR, - eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR, + eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR, + eMailbox = VK_PRESENT_MODE_MAILBOX_KHR, + eFifo = VK_PRESENT_MODE_FIFO_KHR, + eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR, + eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR, eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR }; @@ -6710,46 +8035,46 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case PresentModeKHR::eImmediate : return "Immediate"; - case PresentModeKHR::eMailbox : return "Mailbox"; - case PresentModeKHR::eFifo : return "Fifo"; - case PresentModeKHR::eFifoRelaxed : return "FifoRelaxed"; - case PresentModeKHR::eSharedDemandRefresh : return "SharedDemandRefresh"; - case PresentModeKHR::eSharedContinuousRefresh : return "SharedContinuousRefresh"; + case PresentModeKHR::eImmediate: return "Immediate"; + case PresentModeKHR::eMailbox: return "Mailbox"; + case PresentModeKHR::eFifo: return "Fifo"; + case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed"; + case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh"; + case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh"; default: return "invalid"; } } enum class PrimitiveTopology { - ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, - eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST, - eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, - eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, - eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, - eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN, - eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, - eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, - eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, + ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, + eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST, + eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, + eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, + eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, + eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN, + eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, + eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, + eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY, - ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST + ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST }; VULKAN_HPP_INLINE std::string to_string( 
PrimitiveTopology value ) { switch ( value ) { - case PrimitiveTopology::ePointList : return "PointList"; - case PrimitiveTopology::eLineList : return "LineList"; - case PrimitiveTopology::eLineStrip : return "LineStrip"; - case PrimitiveTopology::eTriangleList : return "TriangleList"; - case PrimitiveTopology::eTriangleStrip : return "TriangleStrip"; - case PrimitiveTopology::eTriangleFan : return "TriangleFan"; - case PrimitiveTopology::eLineListWithAdjacency : return "LineListWithAdjacency"; - case PrimitiveTopology::eLineStripWithAdjacency : return "LineStripWithAdjacency"; - case PrimitiveTopology::eTriangleListWithAdjacency : return "TriangleListWithAdjacency"; - case PrimitiveTopology::eTriangleStripWithAdjacency : return "TriangleStripWithAdjacency"; - case PrimitiveTopology::ePatchList : return "PatchList"; + case PrimitiveTopology::ePointList: return "PointList"; + case PrimitiveTopology::eLineList: return "LineList"; + case PrimitiveTopology::eLineStrip: return "LineStrip"; + case PrimitiveTopology::eTriangleList: return "TriangleList"; + case PrimitiveTopology::eTriangleStrip: return "TriangleStrip"; + case PrimitiveTopology::eTriangleFan: return "TriangleFan"; + case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency"; + case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency"; + case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency"; + case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency"; + case PrimitiveTopology::ePatchList: return "PatchList"; default: return "invalid"; } } @@ -6763,23 +8088,24 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case QueryControlFlagBits::ePrecise : return "Precise"; + case QueryControlFlagBits::ePrecise: return "Precise"; default: return "invalid"; } } enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags { - eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT, - eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT, - eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT, - eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT, - eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT, - eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT, - eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT, - eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT, + eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT, + eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT, + eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT, + eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT, + eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT, + eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT, + eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT, + eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT, eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT, - eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT, + 
eTessellationEvaluationShaderInvocations = + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT, eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT }; @@ -6787,23 +8113,25 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case QueryPipelineStatisticFlagBits::eInputAssemblyVertices : return "InputAssemblyVertices"; - case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives : return "InputAssemblyPrimitives"; - case QueryPipelineStatisticFlagBits::eVertexShaderInvocations : return "VertexShaderInvocations"; - case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations : return "GeometryShaderInvocations"; - case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives : return "GeometryShaderPrimitives"; - case QueryPipelineStatisticFlagBits::eClippingInvocations : return "ClippingInvocations"; - case QueryPipelineStatisticFlagBits::eClippingPrimitives : return "ClippingPrimitives"; - case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations : return "FragmentShaderInvocations"; - case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches : return "TessellationControlShaderPatches"; - case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations : return "TessellationEvaluationShaderInvocations"; - case QueryPipelineStatisticFlagBits::eComputeShaderInvocations : return "ComputeShaderInvocations"; + case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices"; + case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives"; + case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives"; + case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations"; + case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives"; + case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations"; + case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches"; + case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: + return "TessellationEvaluationShaderInvocations"; + case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations"; default: return "invalid"; } } enum class QueryPoolCreateFlagBits - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits ) { @@ -6819,87 +8147,87 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case QueryPoolSamplingModeINTEL::eManual : return "Manual"; + case QueryPoolSamplingModeINTEL::eManual: return "Manual"; default: return "invalid"; } } enum class QueryResultFlagBits : VkQueryResultFlags { - e64 = VK_QUERY_RESULT_64_BIT, - eWait = VK_QUERY_RESULT_WAIT_BIT, + e64 = VK_QUERY_RESULT_64_BIT, + eWait = VK_QUERY_RESULT_WAIT_BIT, eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, - ePartial = VK_QUERY_RESULT_PARTIAL_BIT + ePartial = VK_QUERY_RESULT_PARTIAL_BIT }; VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value ) { switch ( value ) { - case QueryResultFlagBits::e64 : return "64"; - case QueryResultFlagBits::eWait : return "Wait"; - case QueryResultFlagBits::eWithAvailability : return "WithAvailability"; - case 
QueryResultFlagBits::ePartial : return "Partial"; + case QueryResultFlagBits::e64: return "64"; + case QueryResultFlagBits::eWait: return "Wait"; + case QueryResultFlagBits::eWithAvailability: return "WithAvailability"; + case QueryResultFlagBits::ePartial: return "Partial"; default: return "invalid"; } } enum class QueryType { - eOcclusion = VK_QUERY_TYPE_OCCLUSION, - ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS, - eTimestamp = VK_QUERY_TYPE_TIMESTAMP, - eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, - ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, - eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, + eOcclusion = VK_QUERY_TYPE_OCCLUSION, + ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS, + eTimestamp = VK_QUERY_TYPE_TIMESTAMP, + eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, + ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, + eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, - ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL, - eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV + ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL, + eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV }; VULKAN_HPP_INLINE std::string to_string( QueryType value ) { switch ( value ) { - case QueryType::eOcclusion : return "Occlusion"; - case QueryType::ePipelineStatistics : return "PipelineStatistics"; - case QueryType::eTimestamp : return "Timestamp"; - case QueryType::eTransformFeedbackStreamEXT : return "TransformFeedbackStreamEXT"; - case QueryType::ePerformanceQueryKHR : return "PerformanceQueryKHR"; - case QueryType::eAccelerationStructureCompactedSizeKHR : return "AccelerationStructureCompactedSizeKHR"; - case QueryType::eAccelerationStructureSerializationSizeKHR : return "AccelerationStructureSerializationSizeKHR"; - case QueryType::ePerformanceQueryINTEL : return "PerformanceQueryINTEL"; + case QueryType::eOcclusion: return "Occlusion"; + case QueryType::ePipelineStatistics: return "PipelineStatistics"; + case QueryType::eTimestamp: return "Timestamp"; + case QueryType::eTransformFeedbackStreamEXT: return "TransformFeedbackStreamEXT"; + case QueryType::ePerformanceQueryKHR: return "PerformanceQueryKHR"; + case QueryType::eAccelerationStructureCompactedSizeKHR: return "AccelerationStructureCompactedSizeKHR"; + case QueryType::eAccelerationStructureSerializationSizeKHR: return "AccelerationStructureSerializationSizeKHR"; + case QueryType::ePerformanceQueryINTEL: return "PerformanceQueryINTEL"; default: return "invalid"; } } enum class QueueFlagBits : VkQueueFlags { - eGraphics = VK_QUEUE_GRAPHICS_BIT, - eCompute = VK_QUEUE_COMPUTE_BIT, - eTransfer = VK_QUEUE_TRANSFER_BIT, + eGraphics = VK_QUEUE_GRAPHICS_BIT, + eCompute = VK_QUEUE_COMPUTE_BIT, + eTransfer = VK_QUEUE_TRANSFER_BIT, eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT, - eProtected = VK_QUEUE_PROTECTED_BIT + eProtected = VK_QUEUE_PROTECTED_BIT }; VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value ) { switch ( value ) { - case QueueFlagBits::eGraphics : return "Graphics"; - case QueueFlagBits::eCompute : return "Compute"; - case QueueFlagBits::eTransfer : return "Transfer"; - case QueueFlagBits::eSparseBinding : return 
"SparseBinding"; - case QueueFlagBits::eProtected : return "Protected"; + case QueueFlagBits::eGraphics: return "Graphics"; + case QueueFlagBits::eCompute: return "Compute"; + case QueueFlagBits::eTransfer: return "Transfer"; + case QueueFlagBits::eSparseBinding: return "SparseBinding"; + case QueueFlagBits::eProtected: return "Protected"; default: return "invalid"; } } enum class QueueGlobalPriorityEXT { - eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, - eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT, - eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT, + eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, + eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT, + eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT, eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT }; @@ -6907,17 +8235,17 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case QueueGlobalPriorityEXT::eLow : return "Low"; - case QueueGlobalPriorityEXT::eMedium : return "Medium"; - case QueueGlobalPriorityEXT::eHigh : return "High"; - case QueueGlobalPriorityEXT::eRealtime : return "Realtime"; + case QueueGlobalPriorityEXT::eLow: return "Low"; + case QueueGlobalPriorityEXT::eMedium: return "Medium"; + case QueueGlobalPriorityEXT::eHigh: return "High"; + case QueueGlobalPriorityEXT::eRealtime: return "Realtime"; default: return "invalid"; } } enum class RasterizationOrderAMD { - eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD, + eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD, eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD }; @@ -6925,16 +8253,16 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case RasterizationOrderAMD::eStrict : return "Strict"; - case RasterizationOrderAMD::eRelaxed : return "Relaxed"; + case RasterizationOrderAMD::eStrict: return "Strict"; + case RasterizationOrderAMD::eRelaxed: return "Relaxed"; default: return "invalid"; } } enum class RayTracingShaderGroupTypeKHR { - eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, - eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR }; using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR; @@ -6943,9 +8271,9 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case RayTracingShaderGroupTypeKHR::eGeneral : return "General"; - case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup : return "TrianglesHitGroup"; - case RayTracingShaderGroupTypeKHR::eProceduralHitGroup : return "ProceduralHitGroup"; + case RayTracingShaderGroupTypeKHR::eGeneral: return "General"; + case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup: return "TrianglesHitGroup"; + case RayTracingShaderGroupTypeKHR::eProceduralHitGroup: return "ProceduralHitGroup"; default: return "invalid"; } } @@ -6959,18 +8287,18 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case RenderPassCreateFlagBits::eTransformQCOM : return "TransformQCOM"; + case RenderPassCreateFlagBits::eTransformQCOM: return "TransformQCOM"; default: return "invalid"; } } enum class ResolveModeFlagBits : VkResolveModeFlags { - eNone = VK_RESOLVE_MODE_NONE, + eNone = VK_RESOLVE_MODE_NONE, eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, - eAverage = VK_RESOLVE_MODE_AVERAGE_BIT, - eMin = VK_RESOLVE_MODE_MIN_BIT, - eMax = VK_RESOLVE_MODE_MAX_BIT + eAverage = VK_RESOLVE_MODE_AVERAGE_BIT, + eMin = VK_RESOLVE_MODE_MIN_BIT, + eMax = VK_RESOLVE_MODE_MAX_BIT }; using ResolveModeFlagBitsKHR = ResolveModeFlagBits; @@ 
-6978,117 +8306,117 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ResolveModeFlagBits::eNone : return "None"; - case ResolveModeFlagBits::eSampleZero : return "SampleZero"; - case ResolveModeFlagBits::eAverage : return "Average"; - case ResolveModeFlagBits::eMin : return "Min"; - case ResolveModeFlagBits::eMax : return "Max"; + case ResolveModeFlagBits::eNone: return "None"; + case ResolveModeFlagBits::eSampleZero: return "SampleZero"; + case ResolveModeFlagBits::eAverage: return "Average"; + case ResolveModeFlagBits::eMin: return "Min"; + case ResolveModeFlagBits::eMax: return "Max"; default: return "invalid"; } } enum class Result { - eSuccess = VK_SUCCESS, - eNotReady = VK_NOT_READY, - eTimeout = VK_TIMEOUT, - eEventSet = VK_EVENT_SET, - eEventReset = VK_EVENT_RESET, - eIncomplete = VK_INCOMPLETE, - eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY, - eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY, - eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED, - eErrorDeviceLost = VK_ERROR_DEVICE_LOST, - eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED, - eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT, - eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT, - eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT, - eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER, - eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS, - eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED, - eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL, - eErrorUnknown = VK_ERROR_UNKNOWN, - eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY, - eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE, - eErrorFragmentation = VK_ERROR_FRAGMENTATION, - eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, - eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR, - eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, - eSuboptimalKHR = VK_SUBOPTIMAL_KHR, - eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR, - eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, - eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, - eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, - eErrorIncompatibleVersionKHR = VK_ERROR_INCOMPATIBLE_VERSION_KHR, + eSuccess = VK_SUCCESS, + eNotReady = VK_NOT_READY, + eTimeout = VK_TIMEOUT, + eEventSet = VK_EVENT_SET, + eEventReset = VK_EVENT_RESET, + eIncomplete = VK_INCOMPLETE, + eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY, + eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY, + eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED, + eErrorDeviceLost = VK_ERROR_DEVICE_LOST, + eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED, + eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT, + eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT, + eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT, + eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER, + eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS, + eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED, + eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL, + eErrorUnknown = VK_ERROR_UNKNOWN, + eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY, + eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE, + eErrorFragmentation = VK_ERROR_FRAGMENTATION, + eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR, + eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, + eSuboptimalKHR = VK_SUBOPTIMAL_KHR, + eErrorOutOfDateKHR = 
VK_ERROR_OUT_OF_DATE_KHR, + eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, + eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, + eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, + eErrorIncompatibleVersionKHR = VK_ERROR_INCOMPATIBLE_VERSION_KHR, eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, - eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, - eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, - eThreadIdleKHR = VK_THREAD_IDLE_KHR, - eThreadDoneKHR = VK_THREAD_DONE_KHR, - eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR, - eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR, - ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT, - eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, - eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, - eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, - eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, - eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR, - eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT + eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, + eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, + eThreadIdleKHR = VK_THREAD_IDLE_KHR, + eThreadDoneKHR = VK_THREAD_DONE_KHR, + eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR, + eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR, + ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT, + eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, + eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, + eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, + eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, + eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR, + eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT }; VULKAN_HPP_INLINE std::string to_string( Result value ) { switch ( value ) { - case Result::eSuccess : return "Success"; - case Result::eNotReady : return "NotReady"; - case Result::eTimeout : return "Timeout"; - case Result::eEventSet : return "EventSet"; - case Result::eEventReset : return "EventReset"; - case Result::eIncomplete : return "Incomplete"; - case Result::eErrorOutOfHostMemory : return "ErrorOutOfHostMemory"; - case Result::eErrorOutOfDeviceMemory : return "ErrorOutOfDeviceMemory"; - case Result::eErrorInitializationFailed : return "ErrorInitializationFailed"; - case Result::eErrorDeviceLost : return "ErrorDeviceLost"; - case Result::eErrorMemoryMapFailed : return "ErrorMemoryMapFailed"; - case Result::eErrorLayerNotPresent : return "ErrorLayerNotPresent"; - case Result::eErrorExtensionNotPresent : return "ErrorExtensionNotPresent"; - case Result::eErrorFeatureNotPresent : return "ErrorFeatureNotPresent"; - case Result::eErrorIncompatibleDriver : return "ErrorIncompatibleDriver"; - case Result::eErrorTooManyObjects : return "ErrorTooManyObjects"; - case Result::eErrorFormatNotSupported : return "ErrorFormatNotSupported"; - case Result::eErrorFragmentedPool : return "ErrorFragmentedPool"; - case Result::eErrorUnknown : return "ErrorUnknown"; - case Result::eErrorOutOfPoolMemory : return "ErrorOutOfPoolMemory"; - case Result::eErrorInvalidExternalHandle : return "ErrorInvalidExternalHandle"; - case Result::eErrorFragmentation : return "ErrorFragmentation"; - case 
Result::eErrorInvalidOpaqueCaptureAddress : return "ErrorInvalidOpaqueCaptureAddress"; - case Result::eErrorSurfaceLostKHR : return "ErrorSurfaceLostKHR"; - case Result::eErrorNativeWindowInUseKHR : return "ErrorNativeWindowInUseKHR"; - case Result::eSuboptimalKHR : return "SuboptimalKHR"; - case Result::eErrorOutOfDateKHR : return "ErrorOutOfDateKHR"; - case Result::eErrorIncompatibleDisplayKHR : return "ErrorIncompatibleDisplayKHR"; - case Result::eErrorValidationFailedEXT : return "ErrorValidationFailedEXT"; - case Result::eErrorInvalidShaderNV : return "ErrorInvalidShaderNV"; - case Result::eErrorIncompatibleVersionKHR : return "ErrorIncompatibleVersionKHR"; - case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT : return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; - case Result::eErrorNotPermittedEXT : return "ErrorNotPermittedEXT"; - case Result::eErrorFullScreenExclusiveModeLostEXT : return "ErrorFullScreenExclusiveModeLostEXT"; - case Result::eThreadIdleKHR : return "ThreadIdleKHR"; - case Result::eThreadDoneKHR : return "ThreadDoneKHR"; - case Result::eOperationDeferredKHR : return "OperationDeferredKHR"; - case Result::eOperationNotDeferredKHR : return "OperationNotDeferredKHR"; - case Result::ePipelineCompileRequiredEXT : return "PipelineCompileRequiredEXT"; + case Result::eSuccess: return "Success"; + case Result::eNotReady: return "NotReady"; + case Result::eTimeout: return "Timeout"; + case Result::eEventSet: return "EventSet"; + case Result::eEventReset: return "EventReset"; + case Result::eIncomplete: return "Incomplete"; + case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory"; + case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory"; + case Result::eErrorInitializationFailed: return "ErrorInitializationFailed"; + case Result::eErrorDeviceLost: return "ErrorDeviceLost"; + case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed"; + case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent"; + case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent"; + case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent"; + case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver"; + case Result::eErrorTooManyObjects: return "ErrorTooManyObjects"; + case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported"; + case Result::eErrorFragmentedPool: return "ErrorFragmentedPool"; + case Result::eErrorUnknown: return "ErrorUnknown"; + case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory"; + case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle"; + case Result::eErrorFragmentation: return "ErrorFragmentation"; + case Result::eErrorInvalidOpaqueCaptureAddress: return "ErrorInvalidOpaqueCaptureAddress"; + case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR"; + case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR"; + case Result::eSuboptimalKHR: return "SuboptimalKHR"; + case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR"; + case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR"; + case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT"; + case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV"; + case Result::eErrorIncompatibleVersionKHR: return "ErrorIncompatibleVersionKHR"; + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; + case Result::eErrorNotPermittedEXT: return "ErrorNotPermittedEXT"; + case 
Result::eErrorFullScreenExclusiveModeLostEXT: return "ErrorFullScreenExclusiveModeLostEXT"; + case Result::eThreadIdleKHR: return "ThreadIdleKHR"; + case Result::eThreadDoneKHR: return "ThreadDoneKHR"; + case Result::eOperationDeferredKHR: return "OperationDeferredKHR"; + case Result::eOperationNotDeferredKHR: return "OperationNotDeferredKHR"; + case Result::ePipelineCompileRequiredEXT: return "PipelineCompileRequiredEXT"; default: return "invalid"; } } enum class SampleCountFlagBits : VkSampleCountFlags { - e1 = VK_SAMPLE_COUNT_1_BIT, - e2 = VK_SAMPLE_COUNT_2_BIT, - e4 = VK_SAMPLE_COUNT_4_BIT, - e8 = VK_SAMPLE_COUNT_8_BIT, + e1 = VK_SAMPLE_COUNT_1_BIT, + e2 = VK_SAMPLE_COUNT_2_BIT, + e4 = VK_SAMPLE_COUNT_4_BIT, + e8 = VK_SAMPLE_COUNT_8_BIT, e16 = VK_SAMPLE_COUNT_16_BIT, e32 = VK_SAMPLE_COUNT_32_BIT, e64 = VK_SAMPLE_COUNT_64_BIT @@ -7098,24 +8426,24 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SampleCountFlagBits::e1 : return "1"; - case SampleCountFlagBits::e2 : return "2"; - case SampleCountFlagBits::e4 : return "4"; - case SampleCountFlagBits::e8 : return "8"; - case SampleCountFlagBits::e16 : return "16"; - case SampleCountFlagBits::e32 : return "32"; - case SampleCountFlagBits::e64 : return "64"; + case SampleCountFlagBits::e1: return "1"; + case SampleCountFlagBits::e2: return "2"; + case SampleCountFlagBits::e4: return "4"; + case SampleCountFlagBits::e8: return "8"; + case SampleCountFlagBits::e16: return "16"; + case SampleCountFlagBits::e32: return "32"; + case SampleCountFlagBits::e64: return "64"; default: return "invalid"; } } enum class SamplerAddressMode { - eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT, - eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, - eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, - eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, - eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, + eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT, + eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, + eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, + eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, + eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR }; @@ -7123,18 +8451,18 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SamplerAddressMode::eRepeat : return "Repeat"; - case SamplerAddressMode::eMirroredRepeat : return "MirroredRepeat"; - case SamplerAddressMode::eClampToEdge : return "ClampToEdge"; - case SamplerAddressMode::eClampToBorder : return "ClampToBorder"; - case SamplerAddressMode::eMirrorClampToEdge : return "MirrorClampToEdge"; + case SamplerAddressMode::eRepeat: return "Repeat"; + case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat"; + case SamplerAddressMode::eClampToEdge: return "ClampToEdge"; + case SamplerAddressMode::eClampToBorder: return "ClampToBorder"; + case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge"; default: return "invalid"; } } enum class SamplerCreateFlagBits : VkSamplerCreateFlags { - eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, + eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT }; @@ -7142,8 +8470,8 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SamplerCreateFlagBits::eSubsampledEXT : return "SubsampledEXT"; - case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT : return 
"SubsampledCoarseReconstructionEXT"; + case SamplerCreateFlagBits::eSubsampledEXT: return "SubsampledEXT"; + case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT: return "SubsampledCoarseReconstructionEXT"; default: return "invalid"; } } @@ -7151,15 +8479,15 @@ namespace VULKAN_HPP_NAMESPACE enum class SamplerMipmapMode { eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST, - eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR + eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR }; VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value ) { switch ( value ) { - case SamplerMipmapMode::eNearest : return "Nearest"; - case SamplerMipmapMode::eLinear : return "Linear"; + case SamplerMipmapMode::eNearest: return "Nearest"; + case SamplerMipmapMode::eLinear: return "Linear"; default: return "invalid"; } } @@ -7167,8 +8495,8 @@ namespace VULKAN_HPP_NAMESPACE enum class SamplerReductionMode { eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, - eMin = VK_SAMPLER_REDUCTION_MODE_MIN, - eMax = VK_SAMPLER_REDUCTION_MODE_MAX + eMin = VK_SAMPLER_REDUCTION_MODE_MIN, + eMax = VK_SAMPLER_REDUCTION_MODE_MAX }; using SamplerReductionModeEXT = SamplerReductionMode; @@ -7176,20 +8504,20 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SamplerReductionMode::eWeightedAverage : return "WeightedAverage"; - case SamplerReductionMode::eMin : return "Min"; - case SamplerReductionMode::eMax : return "Max"; + case SamplerReductionMode::eWeightedAverage: return "WeightedAverage"; + case SamplerReductionMode::eMin: return "Min"; + case SamplerReductionMode::eMax: return "Max"; default: return "invalid"; } } enum class SamplerYcbcrModelConversion { - eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, - eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, - eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, - eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 + eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 }; using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion; @@ -7197,18 +8525,18 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SamplerYcbcrModelConversion::eRgbIdentity : return "RgbIdentity"; - case SamplerYcbcrModelConversion::eYcbcrIdentity : return "YcbcrIdentity"; - case SamplerYcbcrModelConversion::eYcbcr709 : return "Ycbcr709"; - case SamplerYcbcrModelConversion::eYcbcr601 : return "Ycbcr601"; - case SamplerYcbcrModelConversion::eYcbcr2020 : return "Ycbcr2020"; + case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity"; + case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity"; + case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709"; + case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601"; + case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020"; default: return "invalid"; } } enum class SamplerYcbcrRange { - eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW }; using SamplerYcbcrRangeKHR = SamplerYcbcrRange; @@ -7217,17 +8545,17 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SamplerYcbcrRange::eItuFull : return "ItuFull"; - case SamplerYcbcrRange::eItuNarrow : return "ItuNarrow"; + case SamplerYcbcrRange::eItuFull: return "ItuFull"; + 
case SamplerYcbcrRange::eItuNarrow: return "ItuNarrow"; default: return "invalid"; } } enum class ScopeNV { - eDevice = VK_SCOPE_DEVICE_NV, - eWorkgroup = VK_SCOPE_WORKGROUP_NV, - eSubgroup = VK_SCOPE_SUBGROUP_NV, + eDevice = VK_SCOPE_DEVICE_NV, + eWorkgroup = VK_SCOPE_WORKGROUP_NV, + eSubgroup = VK_SCOPE_SUBGROUP_NV, eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV }; @@ -7235,16 +8563,17 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ScopeNV::eDevice : return "Device"; - case ScopeNV::eWorkgroup : return "Workgroup"; - case ScopeNV::eSubgroup : return "Subgroup"; - case ScopeNV::eQueueFamily : return "QueueFamily"; + case ScopeNV::eDevice: return "Device"; + case ScopeNV::eWorkgroup: return "Workgroup"; + case ScopeNV::eSubgroup: return "Subgroup"; + case ScopeNV::eQueueFamily: return "QueueFamily"; default: return "invalid"; } } enum class SemaphoreCreateFlagBits : VkSemaphoreCreateFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits ) { @@ -7261,14 +8590,14 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SemaphoreImportFlagBits::eTemporary : return "Temporary"; + case SemaphoreImportFlagBits::eTemporary: return "Temporary"; default: return "invalid"; } } enum class SemaphoreType { - eBinary = VK_SEMAPHORE_TYPE_BINARY, + eBinary = VK_SEMAPHORE_TYPE_BINARY, eTimeline = VK_SEMAPHORE_TYPE_TIMELINE }; using SemaphoreTypeKHR = SemaphoreType; @@ -7277,8 +8606,8 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SemaphoreType::eBinary : return "Binary"; - case SemaphoreType::eTimeline : return "Timeline"; + case SemaphoreType::eBinary: return "Binary"; + case SemaphoreType::eTimeline: return "Timeline"; default: return "invalid"; } } @@ -7293,13 +8622,14 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SemaphoreWaitFlagBits::eAny : return "Any"; + case SemaphoreWaitFlagBits::eAny: return "Any"; default: return "invalid"; } } enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD ) { @@ -7309,8 +8639,8 @@ namespace VULKAN_HPP_NAMESPACE enum class ShaderFloatControlsIndependence { e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, - eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, - eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE + eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, + eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE }; using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence; @@ -7318,17 +8648,17 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ShaderFloatControlsIndependence::e32BitOnly : return "32BitOnly"; - case ShaderFloatControlsIndependence::eAll : return "All"; - case ShaderFloatControlsIndependence::eNone : return "None"; + case ShaderFloatControlsIndependence::e32BitOnly: return "32BitOnly"; + case ShaderFloatControlsIndependence::eAll: return "All"; + case ShaderFloatControlsIndependence::eNone: return "None"; default: return "invalid"; } } enum class ShaderInfoTypeAMD { - eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD, - eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD, + eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD, + eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD, eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD }; @@ -7336,15 +8666,16 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ShaderInfoTypeAMD::eStatistics : return "Statistics"; - case ShaderInfoTypeAMD::eBinary : return "Binary"; - case ShaderInfoTypeAMD::eDisassembly : 
return "Disassembly"; + case ShaderInfoTypeAMD::eStatistics: return "Statistics"; + case ShaderInfoTypeAMD::eBinary: return "Binary"; + case ShaderInfoTypeAMD::eDisassembly: return "Disassembly"; default: return "invalid"; } } enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits ) { @@ -7353,62 +8684,62 @@ namespace VULKAN_HPP_NAMESPACE enum class ShaderStageFlagBits : VkShaderStageFlags { - eVertex = VK_SHADER_STAGE_VERTEX_BIT, - eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, + eVertex = VK_SHADER_STAGE_VERTEX_BIT, + eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, - eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT, - eFragment = VK_SHADER_STAGE_FRAGMENT_BIT, - eCompute = VK_SHADER_STAGE_COMPUTE_BIT, - eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, - eAll = VK_SHADER_STAGE_ALL, - eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR, - eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, - eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, - eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR, - eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, - eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR, - eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, - eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV, - eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV, - eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, - eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, - eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, - eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, - eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV + eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT, + eFragment = VK_SHADER_STAGE_FRAGMENT_BIT, + eCompute = VK_SHADER_STAGE_COMPUTE_BIT, + eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, + eAll = VK_SHADER_STAGE_ALL, + eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR, + eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR, + eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, + eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV, + eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV, + eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, + eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, + eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, + eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, + eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV }; VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value ) { switch ( value ) { - case ShaderStageFlagBits::eVertex : return "Vertex"; - case ShaderStageFlagBits::eTessellationControl : return "TessellationControl"; - case ShaderStageFlagBits::eTessellationEvaluation : return "TessellationEvaluation"; - case ShaderStageFlagBits::eGeometry : return "Geometry"; - case ShaderStageFlagBits::eFragment : return "Fragment"; - case ShaderStageFlagBits::eCompute : return "Compute"; - case ShaderStageFlagBits::eAllGraphics : return "AllGraphics"; - case ShaderStageFlagBits::eAll : return "All"; - case ShaderStageFlagBits::eRaygenKHR : return "RaygenKHR"; - case ShaderStageFlagBits::eAnyHitKHR : return "AnyHitKHR"; - case ShaderStageFlagBits::eClosestHitKHR : return "ClosestHitKHR"; - case ShaderStageFlagBits::eMissKHR : return "MissKHR"; - case ShaderStageFlagBits::eIntersectionKHR : return "IntersectionKHR"; - case ShaderStageFlagBits::eCallableKHR : return "CallableKHR"; - case 
ShaderStageFlagBits::eTaskNV : return "TaskNV"; - case ShaderStageFlagBits::eMeshNV : return "MeshNV"; + case ShaderStageFlagBits::eVertex: return "Vertex"; + case ShaderStageFlagBits::eTessellationControl: return "TessellationControl"; + case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation"; + case ShaderStageFlagBits::eGeometry: return "Geometry"; + case ShaderStageFlagBits::eFragment: return "Fragment"; + case ShaderStageFlagBits::eCompute: return "Compute"; + case ShaderStageFlagBits::eAllGraphics: return "AllGraphics"; + case ShaderStageFlagBits::eAll: return "All"; + case ShaderStageFlagBits::eRaygenKHR: return "RaygenKHR"; + case ShaderStageFlagBits::eAnyHitKHR: return "AnyHitKHR"; + case ShaderStageFlagBits::eClosestHitKHR: return "ClosestHitKHR"; + case ShaderStageFlagBits::eMissKHR: return "MissKHR"; + case ShaderStageFlagBits::eIntersectionKHR: return "IntersectionKHR"; + case ShaderStageFlagBits::eCallableKHR: return "CallableKHR"; + case ShaderStageFlagBits::eTaskNV: return "TaskNV"; + case ShaderStageFlagBits::eMeshNV: return "MeshNV"; default: return "invalid"; } } enum class ShadingRatePaletteEntryNV { - eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, - e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV, - e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV, - e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV, - e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV, - e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, + eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, + e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV, + e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV, + e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV, + e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV, + e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV, e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV, @@ -7421,25 +8752,25 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ShadingRatePaletteEntryNV::eNoInvocations : return "NoInvocations"; - case ShadingRatePaletteEntryNV::e16InvocationsPerPixel : return "16InvocationsPerPixel"; - case ShadingRatePaletteEntryNV::e8InvocationsPerPixel : return "8InvocationsPerPixel"; - case ShadingRatePaletteEntryNV::e4InvocationsPerPixel : return "4InvocationsPerPixel"; - case ShadingRatePaletteEntryNV::e2InvocationsPerPixel : return "2InvocationsPerPixel"; - case ShadingRatePaletteEntryNV::e1InvocationPerPixel : return "1InvocationPerPixel"; - case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels : return "1InvocationPer2X1Pixels"; - case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels : return "1InvocationPer1X2Pixels"; - case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels : return "1InvocationPer2X2Pixels"; - case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels : return "1InvocationPer4X2Pixels"; - case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels : return "1InvocationPer2X4Pixels"; - case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels : return 
"1InvocationPer4X4Pixels"; + case ShadingRatePaletteEntryNV::eNoInvocations: return "NoInvocations"; + case ShadingRatePaletteEntryNV::e16InvocationsPerPixel: return "16InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e8InvocationsPerPixel: return "8InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e4InvocationsPerPixel: return "4InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e2InvocationsPerPixel: return "2InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPerPixel: return "1InvocationPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels"; default: return "invalid"; } } enum class SharingMode { - eExclusive = VK_SHARING_MODE_EXCLUSIVE, + eExclusive = VK_SHARING_MODE_EXCLUSIVE, eConcurrent = VK_SHARING_MODE_CONCURRENT }; @@ -7447,16 +8778,16 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SharingMode::eExclusive : return "Exclusive"; - case SharingMode::eConcurrent : return "Concurrent"; + case SharingMode::eExclusive: return "Exclusive"; + case SharingMode::eConcurrent: return "Concurrent"; default: return "invalid"; } } enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags { - eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT, - eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT, + eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT, + eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT, eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT }; @@ -7464,9 +8795,9 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SparseImageFormatFlagBits::eSingleMiptail : return "SingleMiptail"; - case SparseImageFormatFlagBits::eAlignedMipSize : return "AlignedMipSize"; - case SparseImageFormatFlagBits::eNonstandardBlockSize : return "NonstandardBlockSize"; + case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail"; + case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize"; + case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize"; default: return "invalid"; } } @@ -7480,16 +8811,16 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SparseMemoryBindFlagBits::eMetadata : return "Metadata"; + case SparseMemoryBindFlagBits::eMetadata: return "Metadata"; default: return "invalid"; } } enum class StencilFaceFlagBits : VkStencilFaceFlags { - eFront = VK_STENCIL_FACE_FRONT_BIT, - eBack = VK_STENCIL_FACE_BACK_BIT, - eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK, + eFront = VK_STENCIL_FACE_FRONT_BIT, + eBack = VK_STENCIL_FACE_BACK_BIT, + eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK, eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK }; @@ -7497,1057 +8828,1230 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case StencilFaceFlagBits::eFront : return "Front"; - case StencilFaceFlagBits::eBack : return "Back"; - case StencilFaceFlagBits::eFrontAndBack : return "FrontAndBack"; + case StencilFaceFlagBits::eFront: return "Front"; + case StencilFaceFlagBits::eBack: return "Back"; + case 
StencilFaceFlagBits::eFrontAndBack: return "FrontAndBack"; default: return "invalid"; } } enum class StencilOp { - eKeep = VK_STENCIL_OP_KEEP, - eZero = VK_STENCIL_OP_ZERO, - eReplace = VK_STENCIL_OP_REPLACE, + eKeep = VK_STENCIL_OP_KEEP, + eZero = VK_STENCIL_OP_ZERO, + eReplace = VK_STENCIL_OP_REPLACE, eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP, eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP, - eInvert = VK_STENCIL_OP_INVERT, - eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP, - eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP + eInvert = VK_STENCIL_OP_INVERT, + eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP, + eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP }; VULKAN_HPP_INLINE std::string to_string( StencilOp value ) { switch ( value ) { - case StencilOp::eKeep : return "Keep"; - case StencilOp::eZero : return "Zero"; - case StencilOp::eReplace : return "Replace"; - case StencilOp::eIncrementAndClamp : return "IncrementAndClamp"; - case StencilOp::eDecrementAndClamp : return "DecrementAndClamp"; - case StencilOp::eInvert : return "Invert"; - case StencilOp::eIncrementAndWrap : return "IncrementAndWrap"; - case StencilOp::eDecrementAndWrap : return "DecrementAndWrap"; + case StencilOp::eKeep: return "Keep"; + case StencilOp::eZero: return "Zero"; + case StencilOp::eReplace: return "Replace"; + case StencilOp::eIncrementAndClamp: return "IncrementAndClamp"; + case StencilOp::eDecrementAndClamp: return "DecrementAndClamp"; + case StencilOp::eInvert: return "Invert"; + case StencilOp::eIncrementAndWrap: return "IncrementAndWrap"; + case StencilOp::eDecrementAndWrap: return "DecrementAndWrap"; default: return "invalid"; } } enum class StructureType { - eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO, - eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, - eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, - eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, - eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO, - eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, - eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, - eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, - eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, - eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, - eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, - eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, - eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, - eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, - eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, - eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, - eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, - ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, - ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, - ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, - ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, - ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, - ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, - ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, - ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, - 
ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, - ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, - ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, - eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, - eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, - ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, - eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, - eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, - eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, - eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, - eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, - eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, - eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, - eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, - eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, - eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, - eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, - eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, - eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, - eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, - eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, - eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER, - eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, - eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, - ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, - eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, - eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, - ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, - eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, - eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, - eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, - eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, - eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, - eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, - eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, - eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, - eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, - ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, - eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, - eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, - eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, - eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, - eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, - eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, - ePhysicalDeviceFeatures2 = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, - ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, - eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, - eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, - ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, - eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, - ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, - eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, - ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, - ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO, + eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, + eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, + eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, + eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO, + eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, + eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, + eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, + eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, + eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, + eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, + eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, + eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, + eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, + eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, + eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, + eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, + ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, + ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, + ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, + ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, + ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, + ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, + ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, + ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, + ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, + ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, + ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, + eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, + eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, + ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, + eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, + eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, + eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, + eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, + eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, + eCopyDescriptorSet = 
VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, + eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, + eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, + eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, + eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, + eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, + eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, + eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, + eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, + eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, + eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER, + eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, + eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, + eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, - eImageViewUsageCreateInfo 
= VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, - ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, - eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, - ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, - ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, - ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, - eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, - ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, - ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, - eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, - eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, - eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, - eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, - eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + ePipelineTessellationDomainOriginStateCreateInfo = + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, + ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, + ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, + eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, + eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, - eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, - eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, - ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, - eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, - ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, - eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, - ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, - eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, - eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, - eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, - 
ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, - eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, - eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, - eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, - ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, - eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, - ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, - eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, - ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, - ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES, - ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES, - ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES, - ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES, - eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, - eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, - eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, - eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, - eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, - eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, - eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, - eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, - ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, - ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, - ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, - ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, - ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, - eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, - ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, - ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, - eDescriptorSetVariableDescriptorCountAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, - eDescriptorSetVariableDescriptorCountLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, + eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, 
+ eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES, + ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES, + ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES, + ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES, + eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, + eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, + eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, + eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, + eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, + eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, + eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, + ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, + ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, + ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, + ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, + eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, + ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, + ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, + eDescriptorSetVariableDescriptorCountAllocateInfo = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, + eDescriptorSetVariableDescriptorCountLayoutSupport = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, - eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, - ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, - eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, + eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + ePhysicalDeviceScalarBlockLayoutFeatures = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, + eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, - eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, - ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, - ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, - eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, - eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, - eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, - ePhysicalDeviceUniformBufferStandardLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, - ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, - ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, - eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, - eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, - ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, - ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, + eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, + ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, + ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, + eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, + eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, + eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, + ePhysicalDeviceUniformBufferStandardLayoutFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, + ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, + ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, + eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, + eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, + ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, + ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, - eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, - eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, - eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, - eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, + eTimelineSemaphoreSubmitInfo = 
VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, + eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, + eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES, - eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, - eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, - eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, - eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, - eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, - ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, - eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, - eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR, - eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, - eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, - eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, - eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, - eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, - eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, - eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, - eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, - eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, - eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, - eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, - eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, - eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, - ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, - eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, - eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, - eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, - eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, - eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, - eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, - ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, + eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, + eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, + eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, + eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, + ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, + eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, + eImageSwapchainCreateInfoKHR = 
VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR, + eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, + eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, + eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, + eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, + eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, + eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, + eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, + eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, + eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, + eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, + eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, + eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, + eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + ePipelineRasterizationStateRasterizationOrderAMD = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, + eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, + eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, + eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, + eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, + eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, + eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, + ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT, - ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT, - eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, - eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX, - eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, - eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, + ePipelineRasterizationStateStreamCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT, + eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, + eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX, + eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, + eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV, - eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV, - eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV, - eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV, - eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV, - eWin32KeyedMutexAcquireReleaseInfoNV = 
VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV, - eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT, - eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, - ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, - eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, - ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, - eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, - eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, - eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR, - eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, - eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, - eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR, - eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, - eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR, - eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, - eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, - eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR, - eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, - eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, - eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, + eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV, + eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV, + eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV, + eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT, + eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, + ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, + eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, + ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, + eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR, + eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, + eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, + eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR, + eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, + eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR, + eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR, + 
eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, + eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, + eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, - eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT, - ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, - eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, - ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, + eCommandBufferInheritanceConditionalRenderingInfoEXT = + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT, + ePhysicalDeviceConditionalRenderingFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, + eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, + ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV, - eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, - eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, - eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT, - eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT, - eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, - ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, - ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, - ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, + eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, + eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT, + eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT, + eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, + ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, + ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, + ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, - ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, - ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, - ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, + ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceConservativeRasterizationPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, + ePipelineRasterizationConservativeStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, 
ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT, - ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT, - eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT, - eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR, - eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, - eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR, - eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, - eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, - eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, - ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR, - ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR, - eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR, - ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR, - eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR, - ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR, - ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR, - ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, - eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, - eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR, - eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR, - eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR, - eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR, - eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR, - eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR, - eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, - eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, - eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, - eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT, - eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, - eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT, - eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, - eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID, - eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID, + ePipelineRasterizationDepthClipStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT, + eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT, + eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR, + eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, + eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, + eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, + 
ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR, + ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR, + eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR, + ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR, + eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR, + ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR, + ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR, + ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, + eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, + eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR, + eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR, + eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR, + eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR, + eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR, + eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR, + eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, + eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, + eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, + eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT, + eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, + eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT, + eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, + eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID, + eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID, eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID, - eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, - eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, - eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, - ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, - ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, + eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, + ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, + ePhysicalDeviceInlineUniformBlockPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT, - eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT, - eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, - 
eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, - ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, + eDescriptorPoolInlineUniformBlockCreateInfoEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT, + eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, + eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, + ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT, - eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT, - ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, - ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, - ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT, - ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV, - eBindAccelerationStructureMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR, + eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT, + ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, + ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, + ePipelineColorBlendAdvancedStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT, + ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV, + eBindAccelerationStructureMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR, eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR, - eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR, - eAccelerationStructureCreateGeometryTypeInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR, + eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR, + eAccelerationStructureCreateGeometryTypeInfoKHR = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR, eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR, eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR, - eAccelerationStructureGeometryInstancesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR, - eAccelerationStructureGeometryTrianglesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR, + eAccelerationStructureGeometryInstancesDataKHR = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR, + eAccelerationStructureGeometryTrianglesDataKHR = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR, eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR, - eAccelerationStructureMemoryRequirementsInfoKHR = 
VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR, - eAccelerationStructureVersionKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR, - eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR, - eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR, - eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR, - ePhysicalDeviceRayTracingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR, - ePhysicalDeviceRayTracingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR, - eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR, - eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR, - eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR, - eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR, + eAccelerationStructureMemoryRequirementsInfoKHR = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR, + eAccelerationStructureVersionKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR, + eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR, + eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR, + eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR, + ePhysicalDeviceRayTracingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR, + ePhysicalDeviceRayTracingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR, + eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR, + eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR, + eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR, + eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR, ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV, - ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV, - ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV, - eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT, - eDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, + ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV, + ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV, + eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT, + eDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT, - eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT, + eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT, 
eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT, - eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, - eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, - eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT, - ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV, - ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, + eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, + eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, + eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT, + ePipelineViewportShadingRateImageStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV, + ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV, - ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV, - eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, + ePipelineViewportCoarseSampleOrderStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV, + eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV, - eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, - eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, - eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, - eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, + eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, + eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, + eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, + eAccelerationStructureMemoryRequirementsInfoNV = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV, - eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, - eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, - ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, - ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV, + eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, + eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, + ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, + ePipelineRepresentativeFragmentTestStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV, ePhysicalDeviceImageViewImageFormatInfoEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT, - eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT, + eFilterCubicImageViewImageFormatPropertiesEXT = + VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT, eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, - eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, - eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, - ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, - ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, - ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, - eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, - ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, + eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, + eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, + ePhysicalDeviceExternalMemoryHostPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, + ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, + ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, + eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, + ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, - ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, - ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, - ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, - ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, + ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, + ePipelineVertexInputDivisorStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, + ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, + ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT, - ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, - ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, + ePhysicalDeviceComputeShaderDerivativesFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, + ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV, - ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, - ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, - ePipelineViewportExclusiveScissorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV, + ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, + ePhysicalDeviceShaderImageFootprintFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, + ePipelineViewportExclusiveScissorStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV, ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, - eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, - eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, - ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, - eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, - eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL, - ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL, - ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL, - ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL, - ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL, - ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT, - eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD, - eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD, - eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA, - eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT, + eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, + eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, + ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, + eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, + eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL, + ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL, + ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL, + ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL, + ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL, + ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT, + eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD, + eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD, + eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA, + eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT, 
ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, - ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, + ePhysicalDeviceFragmentDensityMapPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, - ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT, - ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, - ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, - ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, + ePhysicalDeviceSubgroupSizeControlPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT, + ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, + ePhysicalDeviceSubgroupSizeControlFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, + ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT, ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, - eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT, - eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR, - ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV, - ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, - eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT, - ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT, - eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT, - ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV, - eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV, + eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT, + eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR, + ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV, + ePhysicalDeviceBufferDeviceAddressFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, + eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT, + ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT, + eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT, + ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV, + eCooperativeMatrixPropertiesNV = 
VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV, ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV, - ePhysicalDeviceCoverageReductionModeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV, + ePhysicalDeviceCoverageReductionModeFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV, ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV, - eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV, - ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, - ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT, - eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, - eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT, - eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, - eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, - ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, - ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, + eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV, + ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, + ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT, + eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, + eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT, + eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, + eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, + ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, + ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, - ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, - eDeferredOperationInfoKHR = VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR, - ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, - ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, - ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, - ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, - ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, + ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + eDeferredOperationInfoKHR = VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR, + ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, + ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, + ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, + ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, + ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, - ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, - ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV, - eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV, - eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV, - eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV, - eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV, - eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV, + ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, + ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV, + eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV, + eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV, + eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV, + eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV, + eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV, eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV, - ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV, - ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, - ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, - eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM, + ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV, + ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, + ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, + eCommandBufferInheritanceRenderPassTransformInfoQCOM = + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM, eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM, - ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR, - ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT, - ePhysicalDeviceDiagnosticsConfigFeaturesNV = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV, - eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, - ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, - ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, - eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, - eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, - ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, - ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, - ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, - ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, - eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, - eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, - ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, - eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, - ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, - eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR, - ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, - eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, - eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, - eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, - eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, - eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, - eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, - eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, - ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, - eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, - ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, - eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, - ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, - eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, - ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, - eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, - eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, - eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, - ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, - eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, - eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, + ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR, + 
ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT, + ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV, + eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, + ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, + eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, + eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, + ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, + ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, + ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, + ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, + eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, + eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, + ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, + eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, + ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, + eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR, + ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, + eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, + eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, + eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, + eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, + eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, + eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, + eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, + ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, + eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, + ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, + eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, + ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, + eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, + ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, + eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, + eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, + ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, + eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, + eExportSemaphoreCreateInfoKHR 
= VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, - ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, - ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, - eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, - ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, - eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR, - eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, - eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, - eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, - eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, - eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, - eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, - eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, - eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, - eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, - ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, - eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, - eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, + ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, + eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, + ePhysicalDeviceImagelessFramebufferFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, + eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR, + eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, + eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, + eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, + eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, + eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, + eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, + eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, + eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, + eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, + ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, + eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, + eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR, - eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, + eRenderPassInputAttachmentAspectCreateInfoKHR = + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, eImageViewUsageCreateInfoKHR = 
VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, - ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, - ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, + ePipelineTessellationDomainOriginStateCreateInfoKHR = + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, + ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, - eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, - eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, - ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, - eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, - eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, - eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, + eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, + ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, + eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, + eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR, - eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, - eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR, - eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, - eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR, - eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR, - eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, - eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, - ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, - eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR, + eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, + eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR, + eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, + eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR, + eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR, + eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, + eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, + ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, + eSamplerYcbcrConversionImageFormatPropertiesKHR = + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR, eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, - eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, - eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, + eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, + eDescriptorSetLayoutBindingFlagsCreateInfoEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, - ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, - eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, - eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, - eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, + ePhysicalDeviceDescriptorIndexingPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, + eDescriptorSetVariableDescriptorCountAllocateInfoEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, + eDescriptorSetVariableDescriptorCountLayoutSupportEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, + eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, - ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, - eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, - ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, - ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, + ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, + eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, + ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, + ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, - ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, - ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, - ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR, - eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR, - ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, + ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, + 
ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, + ePhysicalDeviceDepthStencilResolvePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR, + eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR, + ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR, - eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR, - eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR, - eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR, - eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR, - eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, - ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, - ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, - ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR, - eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR, - eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR, + eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR, + eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR, + eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR, + eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR, + eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, + ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, + ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, + ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR, + eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR, + eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR, ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT, - eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, - eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, - ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, - ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR, - eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, - eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR, + eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, + eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, + ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, + 
ePhysicalDeviceBufferDeviceAddressFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR, + eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, + eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR, eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR, - eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR, - ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT + eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR, + ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT }; VULKAN_HPP_INLINE std::string to_string( StructureType value ) { switch ( value ) { - case StructureType::eApplicationInfo : return "ApplicationInfo"; - case StructureType::eInstanceCreateInfo : return "InstanceCreateInfo"; - case StructureType::eDeviceQueueCreateInfo : return "DeviceQueueCreateInfo"; - case StructureType::eDeviceCreateInfo : return "DeviceCreateInfo"; - case StructureType::eSubmitInfo : return "SubmitInfo"; - case StructureType::eMemoryAllocateInfo : return "MemoryAllocateInfo"; - case StructureType::eMappedMemoryRange : return "MappedMemoryRange"; - case StructureType::eBindSparseInfo : return "BindSparseInfo"; - case StructureType::eFenceCreateInfo : return "FenceCreateInfo"; - case StructureType::eSemaphoreCreateInfo : return "SemaphoreCreateInfo"; - case StructureType::eEventCreateInfo : return "EventCreateInfo"; - case StructureType::eQueryPoolCreateInfo : return "QueryPoolCreateInfo"; - case StructureType::eBufferCreateInfo : return "BufferCreateInfo"; - case StructureType::eBufferViewCreateInfo : return "BufferViewCreateInfo"; - case StructureType::eImageCreateInfo : return "ImageCreateInfo"; - case StructureType::eImageViewCreateInfo : return "ImageViewCreateInfo"; - case StructureType::eShaderModuleCreateInfo : return "ShaderModuleCreateInfo"; - case StructureType::ePipelineCacheCreateInfo : return "PipelineCacheCreateInfo"; - case StructureType::ePipelineShaderStageCreateInfo : return "PipelineShaderStageCreateInfo"; - case StructureType::ePipelineVertexInputStateCreateInfo : return "PipelineVertexInputStateCreateInfo"; - case StructureType::ePipelineInputAssemblyStateCreateInfo : return "PipelineInputAssemblyStateCreateInfo"; - case StructureType::ePipelineTessellationStateCreateInfo : return "PipelineTessellationStateCreateInfo"; - case StructureType::ePipelineViewportStateCreateInfo : return "PipelineViewportStateCreateInfo"; - case StructureType::ePipelineRasterizationStateCreateInfo : return "PipelineRasterizationStateCreateInfo"; - case StructureType::ePipelineMultisampleStateCreateInfo : return "PipelineMultisampleStateCreateInfo"; - case StructureType::ePipelineDepthStencilStateCreateInfo : return "PipelineDepthStencilStateCreateInfo"; - case StructureType::ePipelineColorBlendStateCreateInfo : return "PipelineColorBlendStateCreateInfo"; - case StructureType::ePipelineDynamicStateCreateInfo : return "PipelineDynamicStateCreateInfo"; - case StructureType::eGraphicsPipelineCreateInfo : return "GraphicsPipelineCreateInfo"; - case StructureType::eComputePipelineCreateInfo : return "ComputePipelineCreateInfo"; - case StructureType::ePipelineLayoutCreateInfo : return "PipelineLayoutCreateInfo"; - case 
StructureType::eSamplerCreateInfo : return "SamplerCreateInfo";
- case StructureType::eDescriptorSetLayoutCreateInfo : return "DescriptorSetLayoutCreateInfo";
- case StructureType::eDescriptorPoolCreateInfo : return "DescriptorPoolCreateInfo";
- case StructureType::eDescriptorSetAllocateInfo : return "DescriptorSetAllocateInfo";
- case StructureType::eWriteDescriptorSet : return "WriteDescriptorSet";
- case StructureType::eCopyDescriptorSet : return "CopyDescriptorSet";
- case StructureType::eFramebufferCreateInfo : return "FramebufferCreateInfo";
- case StructureType::eRenderPassCreateInfo : return "RenderPassCreateInfo";
- case StructureType::eCommandPoolCreateInfo : return "CommandPoolCreateInfo";
- case StructureType::eCommandBufferAllocateInfo : return "CommandBufferAllocateInfo";
- case StructureType::eCommandBufferInheritanceInfo : return "CommandBufferInheritanceInfo";
- case StructureType::eCommandBufferBeginInfo : return "CommandBufferBeginInfo";
- case StructureType::eRenderPassBeginInfo : return "RenderPassBeginInfo";
- case StructureType::eBufferMemoryBarrier : return "BufferMemoryBarrier";
- case StructureType::eImageMemoryBarrier : return "ImageMemoryBarrier";
- case StructureType::eMemoryBarrier : return "MemoryBarrier";
- case StructureType::eLoaderInstanceCreateInfo : return "LoaderInstanceCreateInfo";
- case StructureType::eLoaderDeviceCreateInfo : return "LoaderDeviceCreateInfo";
- case StructureType::ePhysicalDeviceSubgroupProperties : return "PhysicalDeviceSubgroupProperties";
- case StructureType::eBindBufferMemoryInfo : return "BindBufferMemoryInfo";
- case StructureType::eBindImageMemoryInfo : return "BindImageMemoryInfo";
- case StructureType::ePhysicalDevice16BitStorageFeatures : return "PhysicalDevice16BitStorageFeatures";
- case StructureType::eMemoryDedicatedRequirements : return "MemoryDedicatedRequirements";
- case StructureType::eMemoryDedicatedAllocateInfo : return "MemoryDedicatedAllocateInfo";
- case StructureType::eMemoryAllocateFlagsInfo : return "MemoryAllocateFlagsInfo";
- case StructureType::eDeviceGroupRenderPassBeginInfo : return "DeviceGroupRenderPassBeginInfo";
- case StructureType::eDeviceGroupCommandBufferBeginInfo : return "DeviceGroupCommandBufferBeginInfo";
- case StructureType::eDeviceGroupSubmitInfo : return "DeviceGroupSubmitInfo";
- case StructureType::eDeviceGroupBindSparseInfo : return "DeviceGroupBindSparseInfo";
- case StructureType::eBindBufferMemoryDeviceGroupInfo : return "BindBufferMemoryDeviceGroupInfo";
- case StructureType::eBindImageMemoryDeviceGroupInfo : return "BindImageMemoryDeviceGroupInfo";
- case StructureType::ePhysicalDeviceGroupProperties : return "PhysicalDeviceGroupProperties";
- case StructureType::eDeviceGroupDeviceCreateInfo : return "DeviceGroupDeviceCreateInfo";
- case StructureType::eBufferMemoryRequirementsInfo2 : return "BufferMemoryRequirementsInfo2";
- case StructureType::eImageMemoryRequirementsInfo2 : return "ImageMemoryRequirementsInfo2";
- case StructureType::eImageSparseMemoryRequirementsInfo2 : return "ImageSparseMemoryRequirementsInfo2";
- case StructureType::eMemoryRequirements2 : return "MemoryRequirements2";
- case StructureType::eSparseImageMemoryRequirements2 : return "SparseImageMemoryRequirements2";
- case StructureType::ePhysicalDeviceFeatures2 : return "PhysicalDeviceFeatures2";
- case StructureType::ePhysicalDeviceProperties2 : return "PhysicalDeviceProperties2";
- case StructureType::eFormatProperties2 : return "FormatProperties2";
- case StructureType::eImageFormatProperties2 : return
"ImageFormatProperties2";
- case StructureType::ePhysicalDeviceImageFormatInfo2 : return "PhysicalDeviceImageFormatInfo2";
- case StructureType::eQueueFamilyProperties2 : return "QueueFamilyProperties2";
- case StructureType::ePhysicalDeviceMemoryProperties2 : return "PhysicalDeviceMemoryProperties2";
- case StructureType::eSparseImageFormatProperties2 : return "SparseImageFormatProperties2";
- case StructureType::ePhysicalDeviceSparseImageFormatInfo2 : return "PhysicalDeviceSparseImageFormatInfo2";
- case StructureType::ePhysicalDevicePointClippingProperties : return "PhysicalDevicePointClippingProperties";
- case StructureType::eRenderPassInputAttachmentAspectCreateInfo : return "RenderPassInputAttachmentAspectCreateInfo";
- case StructureType::eImageViewUsageCreateInfo : return "ImageViewUsageCreateInfo";
- case StructureType::ePipelineTessellationDomainOriginStateCreateInfo : return "PipelineTessellationDomainOriginStateCreateInfo";
- case StructureType::eRenderPassMultiviewCreateInfo : return "RenderPassMultiviewCreateInfo";
- case StructureType::ePhysicalDeviceMultiviewFeatures : return "PhysicalDeviceMultiviewFeatures";
- case StructureType::ePhysicalDeviceMultiviewProperties : return "PhysicalDeviceMultiviewProperties";
- case StructureType::ePhysicalDeviceVariablePointersFeatures : return "PhysicalDeviceVariablePointersFeatures";
- case StructureType::eProtectedSubmitInfo : return "ProtectedSubmitInfo";
- case StructureType::ePhysicalDeviceProtectedMemoryFeatures : return "PhysicalDeviceProtectedMemoryFeatures";
- case StructureType::ePhysicalDeviceProtectedMemoryProperties : return "PhysicalDeviceProtectedMemoryProperties";
- case StructureType::eDeviceQueueInfo2 : return "DeviceQueueInfo2";
- case StructureType::eSamplerYcbcrConversionCreateInfo : return "SamplerYcbcrConversionCreateInfo";
- case StructureType::eSamplerYcbcrConversionInfo : return "SamplerYcbcrConversionInfo";
- case StructureType::eBindImagePlaneMemoryInfo : return "BindImagePlaneMemoryInfo";
- case StructureType::eImagePlaneMemoryRequirementsInfo : return "ImagePlaneMemoryRequirementsInfo";
- case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures : return "PhysicalDeviceSamplerYcbcrConversionFeatures";
- case StructureType::eSamplerYcbcrConversionImageFormatProperties : return "SamplerYcbcrConversionImageFormatProperties";
- case StructureType::eDescriptorUpdateTemplateCreateInfo : return "DescriptorUpdateTemplateCreateInfo";
- case StructureType::ePhysicalDeviceExternalImageFormatInfo : return "PhysicalDeviceExternalImageFormatInfo";
- case StructureType::eExternalImageFormatProperties : return "ExternalImageFormatProperties";
- case StructureType::ePhysicalDeviceExternalBufferInfo : return "PhysicalDeviceExternalBufferInfo";
- case StructureType::eExternalBufferProperties : return "ExternalBufferProperties";
- case StructureType::ePhysicalDeviceIdProperties : return "PhysicalDeviceIdProperties";
- case StructureType::eExternalMemoryBufferCreateInfo : return "ExternalMemoryBufferCreateInfo";
- case StructureType::eExternalMemoryImageCreateInfo : return "ExternalMemoryImageCreateInfo";
- case StructureType::eExportMemoryAllocateInfo : return "ExportMemoryAllocateInfo";
- case StructureType::ePhysicalDeviceExternalFenceInfo : return "PhysicalDeviceExternalFenceInfo";
- case StructureType::eExternalFenceProperties : return "ExternalFenceProperties";
- case StructureType::eExportFenceCreateInfo : return "ExportFenceCreateInfo";
- case StructureType::eExportSemaphoreCreateInfo : return
"ExportSemaphoreCreateInfo"; - case StructureType::ePhysicalDeviceExternalSemaphoreInfo : return "PhysicalDeviceExternalSemaphoreInfo"; - case StructureType::eExternalSemaphoreProperties : return "ExternalSemaphoreProperties"; - case StructureType::ePhysicalDeviceMaintenance3Properties : return "PhysicalDeviceMaintenance3Properties"; - case StructureType::eDescriptorSetLayoutSupport : return "DescriptorSetLayoutSupport"; - case StructureType::ePhysicalDeviceShaderDrawParametersFeatures : return "PhysicalDeviceShaderDrawParametersFeatures"; - case StructureType::ePhysicalDeviceVulkan11Features : return "PhysicalDeviceVulkan11Features"; - case StructureType::ePhysicalDeviceVulkan11Properties : return "PhysicalDeviceVulkan11Properties"; - case StructureType::ePhysicalDeviceVulkan12Features : return "PhysicalDeviceVulkan12Features"; - case StructureType::ePhysicalDeviceVulkan12Properties : return "PhysicalDeviceVulkan12Properties"; - case StructureType::eImageFormatListCreateInfo : return "ImageFormatListCreateInfo"; - case StructureType::eAttachmentDescription2 : return "AttachmentDescription2"; - case StructureType::eAttachmentReference2 : return "AttachmentReference2"; - case StructureType::eSubpassDescription2 : return "SubpassDescription2"; - case StructureType::eSubpassDependency2 : return "SubpassDependency2"; - case StructureType::eRenderPassCreateInfo2 : return "RenderPassCreateInfo2"; - case StructureType::eSubpassBeginInfo : return "SubpassBeginInfo"; - case StructureType::eSubpassEndInfo : return "SubpassEndInfo"; - case StructureType::ePhysicalDevice8BitStorageFeatures : return "PhysicalDevice8BitStorageFeatures"; - case StructureType::ePhysicalDeviceDriverProperties : return "PhysicalDeviceDriverProperties"; - case StructureType::ePhysicalDeviceShaderAtomicInt64Features : return "PhysicalDeviceShaderAtomicInt64Features"; - case StructureType::ePhysicalDeviceShaderFloat16Int8Features : return "PhysicalDeviceShaderFloat16Int8Features"; - case StructureType::ePhysicalDeviceFloatControlsProperties : return "PhysicalDeviceFloatControlsProperties"; - case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo : return "DescriptorSetLayoutBindingFlagsCreateInfo"; - case StructureType::ePhysicalDeviceDescriptorIndexingFeatures : return "PhysicalDeviceDescriptorIndexingFeatures"; - case StructureType::ePhysicalDeviceDescriptorIndexingProperties : return "PhysicalDeviceDescriptorIndexingProperties"; - case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo : return "DescriptorSetVariableDescriptorCountAllocateInfo"; - case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport : return "DescriptorSetVariableDescriptorCountLayoutSupport"; - case StructureType::ePhysicalDeviceDepthStencilResolveProperties : return "PhysicalDeviceDepthStencilResolveProperties"; - case StructureType::eSubpassDescriptionDepthStencilResolve : return "SubpassDescriptionDepthStencilResolve"; - case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures : return "PhysicalDeviceScalarBlockLayoutFeatures"; - case StructureType::eImageStencilUsageCreateInfo : return "ImageStencilUsageCreateInfo"; - case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties : return "PhysicalDeviceSamplerFilterMinmaxProperties"; - case StructureType::eSamplerReductionModeCreateInfo : return "SamplerReductionModeCreateInfo"; - case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures : return "PhysicalDeviceVulkanMemoryModelFeatures"; - case StructureType::ePhysicalDeviceImagelessFramebufferFeatures 
: return "PhysicalDeviceImagelessFramebufferFeatures"; - case StructureType::eFramebufferAttachmentsCreateInfo : return "FramebufferAttachmentsCreateInfo"; - case StructureType::eFramebufferAttachmentImageInfo : return "FramebufferAttachmentImageInfo"; - case StructureType::eRenderPassAttachmentBeginInfo : return "RenderPassAttachmentBeginInfo"; - case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures : return "PhysicalDeviceUniformBufferStandardLayoutFeatures"; - case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures : return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures"; - case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures : return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures"; - case StructureType::eAttachmentReferenceStencilLayout : return "AttachmentReferenceStencilLayout"; - case StructureType::eAttachmentDescriptionStencilLayout : return "AttachmentDescriptionStencilLayout"; - case StructureType::ePhysicalDeviceHostQueryResetFeatures : return "PhysicalDeviceHostQueryResetFeatures"; - case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures : return "PhysicalDeviceTimelineSemaphoreFeatures"; - case StructureType::ePhysicalDeviceTimelineSemaphoreProperties : return "PhysicalDeviceTimelineSemaphoreProperties"; - case StructureType::eSemaphoreTypeCreateInfo : return "SemaphoreTypeCreateInfo"; - case StructureType::eTimelineSemaphoreSubmitInfo : return "TimelineSemaphoreSubmitInfo"; - case StructureType::eSemaphoreWaitInfo : return "SemaphoreWaitInfo"; - case StructureType::eSemaphoreSignalInfo : return "SemaphoreSignalInfo"; - case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures : return "PhysicalDeviceBufferDeviceAddressFeatures"; - case StructureType::eBufferDeviceAddressInfo : return "BufferDeviceAddressInfo"; - case StructureType::eBufferOpaqueCaptureAddressCreateInfo : return "BufferOpaqueCaptureAddressCreateInfo"; - case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo : return "MemoryOpaqueCaptureAddressAllocateInfo"; - case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo : return "DeviceMemoryOpaqueCaptureAddressInfo"; - case StructureType::eSwapchainCreateInfoKHR : return "SwapchainCreateInfoKHR"; - case StructureType::ePresentInfoKHR : return "PresentInfoKHR"; - case StructureType::eDeviceGroupPresentCapabilitiesKHR : return "DeviceGroupPresentCapabilitiesKHR"; - case StructureType::eImageSwapchainCreateInfoKHR : return "ImageSwapchainCreateInfoKHR"; - case StructureType::eBindImageMemorySwapchainInfoKHR : return "BindImageMemorySwapchainInfoKHR"; - case StructureType::eAcquireNextImageInfoKHR : return "AcquireNextImageInfoKHR"; - case StructureType::eDeviceGroupPresentInfoKHR : return "DeviceGroupPresentInfoKHR"; - case StructureType::eDeviceGroupSwapchainCreateInfoKHR : return "DeviceGroupSwapchainCreateInfoKHR"; - case StructureType::eDisplayModeCreateInfoKHR : return "DisplayModeCreateInfoKHR"; - case StructureType::eDisplaySurfaceCreateInfoKHR : return "DisplaySurfaceCreateInfoKHR"; - case StructureType::eDisplayPresentInfoKHR : return "DisplayPresentInfoKHR"; - case StructureType::eXlibSurfaceCreateInfoKHR : return "XlibSurfaceCreateInfoKHR"; - case StructureType::eXcbSurfaceCreateInfoKHR : return "XcbSurfaceCreateInfoKHR"; - case StructureType::eWaylandSurfaceCreateInfoKHR : return "WaylandSurfaceCreateInfoKHR"; - case StructureType::eAndroidSurfaceCreateInfoKHR : return "AndroidSurfaceCreateInfoKHR"; - case StructureType::eWin32SurfaceCreateInfoKHR : return 
"Win32SurfaceCreateInfoKHR"; - case StructureType::eDebugReportCallbackCreateInfoEXT : return "DebugReportCallbackCreateInfoEXT"; - case StructureType::ePipelineRasterizationStateRasterizationOrderAMD : return "PipelineRasterizationStateRasterizationOrderAMD"; - case StructureType::eDebugMarkerObjectNameInfoEXT : return "DebugMarkerObjectNameInfoEXT"; - case StructureType::eDebugMarkerObjectTagInfoEXT : return "DebugMarkerObjectTagInfoEXT"; - case StructureType::eDebugMarkerMarkerInfoEXT : return "DebugMarkerMarkerInfoEXT"; - case StructureType::eDedicatedAllocationImageCreateInfoNV : return "DedicatedAllocationImageCreateInfoNV"; - case StructureType::eDedicatedAllocationBufferCreateInfoNV : return "DedicatedAllocationBufferCreateInfoNV"; - case StructureType::eDedicatedAllocationMemoryAllocateInfoNV : return "DedicatedAllocationMemoryAllocateInfoNV"; - case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT : return "PhysicalDeviceTransformFeedbackFeaturesEXT"; - case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT : return "PhysicalDeviceTransformFeedbackPropertiesEXT"; - case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT : return "PipelineRasterizationStateStreamCreateInfoEXT"; - case StructureType::eImageViewHandleInfoNVX : return "ImageViewHandleInfoNVX"; - case StructureType::eImageViewAddressPropertiesNVX : return "ImageViewAddressPropertiesNVX"; - case StructureType::eTextureLodGatherFormatPropertiesAMD : return "TextureLodGatherFormatPropertiesAMD"; - case StructureType::eStreamDescriptorSurfaceCreateInfoGGP : return "StreamDescriptorSurfaceCreateInfoGGP"; - case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV : return "PhysicalDeviceCornerSampledImageFeaturesNV"; - case StructureType::eExternalMemoryImageCreateInfoNV : return "ExternalMemoryImageCreateInfoNV"; - case StructureType::eExportMemoryAllocateInfoNV : return "ExportMemoryAllocateInfoNV"; - case StructureType::eImportMemoryWin32HandleInfoNV : return "ImportMemoryWin32HandleInfoNV"; - case StructureType::eExportMemoryWin32HandleInfoNV : return "ExportMemoryWin32HandleInfoNV"; - case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV : return "Win32KeyedMutexAcquireReleaseInfoNV"; - case StructureType::eValidationFlagsEXT : return "ValidationFlagsEXT"; - case StructureType::eViSurfaceCreateInfoNN : return "ViSurfaceCreateInfoNN"; - case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT : return "PhysicalDeviceTextureCompressionAstcHdrFeaturesEXT"; - case StructureType::eImageViewAstcDecodeModeEXT : return "ImageViewAstcDecodeModeEXT"; - case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT : return "PhysicalDeviceAstcDecodeFeaturesEXT"; - case StructureType::eImportMemoryWin32HandleInfoKHR : return "ImportMemoryWin32HandleInfoKHR"; - case StructureType::eExportMemoryWin32HandleInfoKHR : return "ExportMemoryWin32HandleInfoKHR"; - case StructureType::eMemoryWin32HandlePropertiesKHR : return "MemoryWin32HandlePropertiesKHR"; - case StructureType::eMemoryGetWin32HandleInfoKHR : return "MemoryGetWin32HandleInfoKHR"; - case StructureType::eImportMemoryFdInfoKHR : return "ImportMemoryFdInfoKHR"; - case StructureType::eMemoryFdPropertiesKHR : return "MemoryFdPropertiesKHR"; - case StructureType::eMemoryGetFdInfoKHR : return "MemoryGetFdInfoKHR"; - case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR : return "Win32KeyedMutexAcquireReleaseInfoKHR"; - case StructureType::eImportSemaphoreWin32HandleInfoKHR : return "ImportSemaphoreWin32HandleInfoKHR"; - case 
StructureType::eExportSemaphoreWin32HandleInfoKHR : return "ExportSemaphoreWin32HandleInfoKHR"; - case StructureType::eD3D12FenceSubmitInfoKHR : return "D3D12FenceSubmitInfoKHR"; - case StructureType::eSemaphoreGetWin32HandleInfoKHR : return "SemaphoreGetWin32HandleInfoKHR"; - case StructureType::eImportSemaphoreFdInfoKHR : return "ImportSemaphoreFdInfoKHR"; - case StructureType::eSemaphoreGetFdInfoKHR : return "SemaphoreGetFdInfoKHR"; - case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR : return "PhysicalDevicePushDescriptorPropertiesKHR"; - case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT : return "CommandBufferInheritanceConditionalRenderingInfoEXT"; - case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT : return "PhysicalDeviceConditionalRenderingFeaturesEXT"; - case StructureType::eConditionalRenderingBeginInfoEXT : return "ConditionalRenderingBeginInfoEXT"; - case StructureType::ePresentRegionsKHR : return "PresentRegionsKHR"; - case StructureType::ePipelineViewportWScalingStateCreateInfoNV : return "PipelineViewportWScalingStateCreateInfoNV"; - case StructureType::eSurfaceCapabilities2EXT : return "SurfaceCapabilities2EXT"; - case StructureType::eDisplayPowerInfoEXT : return "DisplayPowerInfoEXT"; - case StructureType::eDeviceEventInfoEXT : return "DeviceEventInfoEXT"; - case StructureType::eDisplayEventInfoEXT : return "DisplayEventInfoEXT"; - case StructureType::eSwapchainCounterCreateInfoEXT : return "SwapchainCounterCreateInfoEXT"; - case StructureType::ePresentTimesInfoGOOGLE : return "PresentTimesInfoGOOGLE"; - case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX : return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; - case StructureType::ePipelineViewportSwizzleStateCreateInfoNV : return "PipelineViewportSwizzleStateCreateInfoNV"; - case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT : return "PhysicalDeviceDiscardRectanglePropertiesEXT"; - case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT : return "PipelineDiscardRectangleStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT : return "PhysicalDeviceConservativeRasterizationPropertiesEXT"; - case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT : return "PipelineRasterizationConservativeStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT : return "PhysicalDeviceDepthClipEnableFeaturesEXT"; - case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT : return "PipelineRasterizationDepthClipStateCreateInfoEXT"; - case StructureType::eHdrMetadataEXT : return "HdrMetadataEXT"; - case StructureType::eSharedPresentSurfaceCapabilitiesKHR : return "SharedPresentSurfaceCapabilitiesKHR"; - case StructureType::eImportFenceWin32HandleInfoKHR : return "ImportFenceWin32HandleInfoKHR"; - case StructureType::eExportFenceWin32HandleInfoKHR : return "ExportFenceWin32HandleInfoKHR"; - case StructureType::eFenceGetWin32HandleInfoKHR : return "FenceGetWin32HandleInfoKHR"; - case StructureType::eImportFenceFdInfoKHR : return "ImportFenceFdInfoKHR"; - case StructureType::eFenceGetFdInfoKHR : return "FenceGetFdInfoKHR"; - case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR : return "PhysicalDevicePerformanceQueryFeaturesKHR"; - case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR : return "PhysicalDevicePerformanceQueryPropertiesKHR"; - case StructureType::eQueryPoolPerformanceCreateInfoKHR : return 
"QueryPoolPerformanceCreateInfoKHR"; - case StructureType::ePerformanceQuerySubmitInfoKHR : return "PerformanceQuerySubmitInfoKHR"; - case StructureType::eAcquireProfilingLockInfoKHR : return "AcquireProfilingLockInfoKHR"; - case StructureType::ePerformanceCounterKHR : return "PerformanceCounterKHR"; - case StructureType::ePerformanceCounterDescriptionKHR : return "PerformanceCounterDescriptionKHR"; - case StructureType::ePhysicalDeviceSurfaceInfo2KHR : return "PhysicalDeviceSurfaceInfo2KHR"; - case StructureType::eSurfaceCapabilities2KHR : return "SurfaceCapabilities2KHR"; - case StructureType::eSurfaceFormat2KHR : return "SurfaceFormat2KHR"; - case StructureType::eDisplayProperties2KHR : return "DisplayProperties2KHR"; - case StructureType::eDisplayPlaneProperties2KHR : return "DisplayPlaneProperties2KHR"; - case StructureType::eDisplayModeProperties2KHR : return "DisplayModeProperties2KHR"; - case StructureType::eDisplayPlaneInfo2KHR : return "DisplayPlaneInfo2KHR"; - case StructureType::eDisplayPlaneCapabilities2KHR : return "DisplayPlaneCapabilities2KHR"; - case StructureType::eIosSurfaceCreateInfoMVK : return "IosSurfaceCreateInfoMVK"; - case StructureType::eMacosSurfaceCreateInfoMVK : return "MacosSurfaceCreateInfoMVK"; - case StructureType::eDebugUtilsObjectNameInfoEXT : return "DebugUtilsObjectNameInfoEXT"; - case StructureType::eDebugUtilsObjectTagInfoEXT : return "DebugUtilsObjectTagInfoEXT"; - case StructureType::eDebugUtilsLabelEXT : return "DebugUtilsLabelEXT"; - case StructureType::eDebugUtilsMessengerCallbackDataEXT : return "DebugUtilsMessengerCallbackDataEXT"; - case StructureType::eDebugUtilsMessengerCreateInfoEXT : return "DebugUtilsMessengerCreateInfoEXT"; - case StructureType::eAndroidHardwareBufferUsageANDROID : return "AndroidHardwareBufferUsageANDROID"; - case StructureType::eAndroidHardwareBufferPropertiesANDROID : return "AndroidHardwareBufferPropertiesANDROID"; - case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID : return "AndroidHardwareBufferFormatPropertiesANDROID"; - case StructureType::eImportAndroidHardwareBufferInfoANDROID : return "ImportAndroidHardwareBufferInfoANDROID"; - case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID : return "MemoryGetAndroidHardwareBufferInfoANDROID"; - case StructureType::eExternalFormatANDROID : return "ExternalFormatANDROID"; - case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT : return "PhysicalDeviceInlineUniformBlockFeaturesEXT"; - case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT : return "PhysicalDeviceInlineUniformBlockPropertiesEXT"; - case StructureType::eWriteDescriptorSetInlineUniformBlockEXT : return "WriteDescriptorSetInlineUniformBlockEXT"; - case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT : return "DescriptorPoolInlineUniformBlockCreateInfoEXT"; - case StructureType::eSampleLocationsInfoEXT : return "SampleLocationsInfoEXT"; - case StructureType::eRenderPassSampleLocationsBeginInfoEXT : return "RenderPassSampleLocationsBeginInfoEXT"; - case StructureType::ePipelineSampleLocationsStateCreateInfoEXT : return "PipelineSampleLocationsStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT : return "PhysicalDeviceSampleLocationsPropertiesEXT"; - case StructureType::eMultisamplePropertiesEXT : return "MultisamplePropertiesEXT"; - case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT : return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT"; - case 
StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT : return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; - case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT : return "PipelineColorBlendAdvancedStateCreateInfoEXT"; - case StructureType::ePipelineCoverageToColorStateCreateInfoNV : return "PipelineCoverageToColorStateCreateInfoNV"; - case StructureType::eBindAccelerationStructureMemoryInfoKHR : return "BindAccelerationStructureMemoryInfoKHR"; - case StructureType::eWriteDescriptorSetAccelerationStructureKHR : return "WriteDescriptorSetAccelerationStructureKHR"; - case StructureType::eAccelerationStructureBuildGeometryInfoKHR : return "AccelerationStructureBuildGeometryInfoKHR"; - case StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR : return "AccelerationStructureCreateGeometryTypeInfoKHR"; - case StructureType::eAccelerationStructureDeviceAddressInfoKHR : return "AccelerationStructureDeviceAddressInfoKHR"; - case StructureType::eAccelerationStructureGeometryAabbsDataKHR : return "AccelerationStructureGeometryAabbsDataKHR"; - case StructureType::eAccelerationStructureGeometryInstancesDataKHR : return "AccelerationStructureGeometryInstancesDataKHR"; - case StructureType::eAccelerationStructureGeometryTrianglesDataKHR : return "AccelerationStructureGeometryTrianglesDataKHR"; - case StructureType::eAccelerationStructureGeometryKHR : return "AccelerationStructureGeometryKHR"; - case StructureType::eAccelerationStructureMemoryRequirementsInfoKHR : return "AccelerationStructureMemoryRequirementsInfoKHR"; - case StructureType::eAccelerationStructureVersionKHR : return "AccelerationStructureVersionKHR"; - case StructureType::eCopyAccelerationStructureInfoKHR : return "CopyAccelerationStructureInfoKHR"; - case StructureType::eCopyAccelerationStructureToMemoryInfoKHR : return "CopyAccelerationStructureToMemoryInfoKHR"; - case StructureType::eCopyMemoryToAccelerationStructureInfoKHR : return "CopyMemoryToAccelerationStructureInfoKHR"; - case StructureType::ePhysicalDeviceRayTracingFeaturesKHR : return "PhysicalDeviceRayTracingFeaturesKHR"; - case StructureType::ePhysicalDeviceRayTracingPropertiesKHR : return "PhysicalDeviceRayTracingPropertiesKHR"; - case StructureType::eRayTracingPipelineCreateInfoKHR : return "RayTracingPipelineCreateInfoKHR"; - case StructureType::eRayTracingShaderGroupCreateInfoKHR : return "RayTracingShaderGroupCreateInfoKHR"; - case StructureType::eAccelerationStructureCreateInfoKHR : return "AccelerationStructureCreateInfoKHR"; - case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR : return "RayTracingPipelineInterfaceCreateInfoKHR"; - case StructureType::ePipelineCoverageModulationStateCreateInfoNV : return "PipelineCoverageModulationStateCreateInfoNV"; - case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV : return "PhysicalDeviceShaderSmBuiltinsFeaturesNV"; - case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV : return "PhysicalDeviceShaderSmBuiltinsPropertiesNV"; - case StructureType::eDrmFormatModifierPropertiesListEXT : return "DrmFormatModifierPropertiesListEXT"; - case StructureType::eDrmFormatModifierPropertiesEXT : return "DrmFormatModifierPropertiesEXT"; - case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT : return "PhysicalDeviceImageDrmFormatModifierInfoEXT"; - case StructureType::eImageDrmFormatModifierListCreateInfoEXT : return "ImageDrmFormatModifierListCreateInfoEXT"; - case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT : return 
"ImageDrmFormatModifierExplicitCreateInfoEXT"; - case StructureType::eImageDrmFormatModifierPropertiesEXT : return "ImageDrmFormatModifierPropertiesEXT"; - case StructureType::eValidationCacheCreateInfoEXT : return "ValidationCacheCreateInfoEXT"; - case StructureType::eShaderModuleValidationCacheCreateInfoEXT : return "ShaderModuleValidationCacheCreateInfoEXT"; - case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV"; - case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV"; - case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV"; - case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV : return "PipelineViewportCoarseSampleOrderStateCreateInfoNV"; - case StructureType::eRayTracingPipelineCreateInfoNV : return "RayTracingPipelineCreateInfoNV"; - case StructureType::eAccelerationStructureCreateInfoNV : return "AccelerationStructureCreateInfoNV"; - case StructureType::eGeometryNV : return "GeometryNV"; - case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV"; - case StructureType::eGeometryAabbNV : return "GeometryAabbNV"; - case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV"; - case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV"; - case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV"; - case StructureType::eAccelerationStructureInfoNV : return "AccelerationStructureInfoNV"; - case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV : return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV"; - case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV : return "PipelineRepresentativeFragmentTestStateCreateInfoNV"; - case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT : return "PhysicalDeviceImageViewImageFormatInfoEXT"; - case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT : return "FilterCubicImageViewImageFormatPropertiesEXT"; - case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT : return "DeviceQueueGlobalPriorityCreateInfoEXT"; - case StructureType::eImportMemoryHostPointerInfoEXT : return "ImportMemoryHostPointerInfoEXT"; - case StructureType::eMemoryHostPointerPropertiesEXT : return "MemoryHostPointerPropertiesEXT"; - case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT : return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; - case StructureType::ePhysicalDeviceShaderClockFeaturesKHR : return "PhysicalDeviceShaderClockFeaturesKHR"; - case StructureType::ePipelineCompilerControlCreateInfoAMD : return "PipelineCompilerControlCreateInfoAMD"; - case StructureType::eCalibratedTimestampInfoEXT : return "CalibratedTimestampInfoEXT"; - case StructureType::ePhysicalDeviceShaderCorePropertiesAMD : return "PhysicalDeviceShaderCorePropertiesAMD"; - case StructureType::eDeviceMemoryOverallocationCreateInfoAMD : return "DeviceMemoryOverallocationCreateInfoAMD"; - case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT : return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; - case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT : return "PipelineVertexInputDivisorStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT : return 
"PhysicalDeviceVertexAttributeDivisorFeaturesEXT"; - case StructureType::ePresentFrameTokenGGP : return "PresentFrameTokenGGP"; - case StructureType::ePipelineCreationFeedbackCreateInfoEXT : return "PipelineCreationFeedbackCreateInfoEXT"; - case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV : return "PhysicalDeviceComputeShaderDerivativesFeaturesNV"; - case StructureType::ePhysicalDeviceMeshShaderFeaturesNV : return "PhysicalDeviceMeshShaderFeaturesNV"; - case StructureType::ePhysicalDeviceMeshShaderPropertiesNV : return "PhysicalDeviceMeshShaderPropertiesNV"; - case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV : return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV"; - case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV : return "PhysicalDeviceShaderImageFootprintFeaturesNV"; - case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV : return "PipelineViewportExclusiveScissorStateCreateInfoNV"; - case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV : return "PhysicalDeviceExclusiveScissorFeaturesNV"; - case StructureType::eCheckpointDataNV : return "CheckpointDataNV"; - case StructureType::eQueueFamilyCheckpointPropertiesNV : return "QueueFamilyCheckpointPropertiesNV"; - case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL : return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; - case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL : return "QueryPoolPerformanceQueryCreateInfoINTEL"; - case StructureType::eInitializePerformanceApiInfoINTEL : return "InitializePerformanceApiInfoINTEL"; - case StructureType::ePerformanceMarkerInfoINTEL : return "PerformanceMarkerInfoINTEL"; - case StructureType::ePerformanceStreamMarkerInfoINTEL : return "PerformanceStreamMarkerInfoINTEL"; - case StructureType::ePerformanceOverrideInfoINTEL : return "PerformanceOverrideInfoINTEL"; - case StructureType::ePerformanceConfigurationAcquireInfoINTEL : return "PerformanceConfigurationAcquireInfoINTEL"; - case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT : return "PhysicalDevicePciBusInfoPropertiesEXT"; - case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD : return "DisplayNativeHdrSurfaceCapabilitiesAMD"; - case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD : return "SwapchainDisplayNativeHdrCreateInfoAMD"; - case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA : return "ImagepipeSurfaceCreateInfoFUCHSIA"; - case StructureType::eMetalSurfaceCreateInfoEXT : return "MetalSurfaceCreateInfoEXT"; - case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT : return "PhysicalDeviceFragmentDensityMapFeaturesEXT"; - case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT : return "PhysicalDeviceFragmentDensityMapPropertiesEXT"; - case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT : return "RenderPassFragmentDensityMapCreateInfoEXT"; - case StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT : return "PhysicalDeviceSubgroupSizeControlPropertiesEXT"; - case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT : return "PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT"; - case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT : return "PhysicalDeviceSubgroupSizeControlFeaturesEXT"; - case StructureType::ePhysicalDeviceShaderCoreProperties2AMD : return "PhysicalDeviceShaderCoreProperties2AMD"; - case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD : return "PhysicalDeviceCoherentMemoryFeaturesAMD"; - case 
StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT : return "PhysicalDeviceMemoryBudgetPropertiesEXT"; - case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT : return "PhysicalDeviceMemoryPriorityFeaturesEXT"; - case StructureType::eMemoryPriorityAllocateInfoEXT : return "MemoryPriorityAllocateInfoEXT"; - case StructureType::eSurfaceProtectedCapabilitiesKHR : return "SurfaceProtectedCapabilitiesKHR"; - case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV : return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV"; - case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT : return "PhysicalDeviceBufferDeviceAddressFeaturesEXT"; - case StructureType::eBufferDeviceAddressCreateInfoEXT : return "BufferDeviceAddressCreateInfoEXT"; - case StructureType::ePhysicalDeviceToolPropertiesEXT : return "PhysicalDeviceToolPropertiesEXT"; - case StructureType::eValidationFeaturesEXT : return "ValidationFeaturesEXT"; - case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV : return "PhysicalDeviceCooperativeMatrixFeaturesNV"; - case StructureType::eCooperativeMatrixPropertiesNV : return "CooperativeMatrixPropertiesNV"; - case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV : return "PhysicalDeviceCooperativeMatrixPropertiesNV"; - case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV : return "PhysicalDeviceCoverageReductionModeFeaturesNV"; - case StructureType::ePipelineCoverageReductionStateCreateInfoNV : return "PipelineCoverageReductionStateCreateInfoNV"; - case StructureType::eFramebufferMixedSamplesCombinationNV : return "FramebufferMixedSamplesCombinationNV"; - case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT : return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT"; - case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT : return "PhysicalDeviceYcbcrImageArraysFeaturesEXT"; - case StructureType::eSurfaceFullScreenExclusiveInfoEXT : return "SurfaceFullScreenExclusiveInfoEXT"; - case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT : return "SurfaceCapabilitiesFullScreenExclusiveEXT"; - case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT : return "SurfaceFullScreenExclusiveWin32InfoEXT"; - case StructureType::eHeadlessSurfaceCreateInfoEXT : return "HeadlessSurfaceCreateInfoEXT"; - case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT : return "PhysicalDeviceLineRasterizationFeaturesEXT"; - case StructureType::ePipelineRasterizationLineStateCreateInfoEXT : return "PipelineRasterizationLineStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT : return "PhysicalDeviceLineRasterizationPropertiesEXT"; - case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT : return "PhysicalDeviceIndexTypeUint8FeaturesEXT"; - case StructureType::eDeferredOperationInfoKHR : return "DeferredOperationInfoKHR"; - case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR : return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR"; - case StructureType::ePipelineInfoKHR : return "PipelineInfoKHR"; - case StructureType::ePipelineExecutablePropertiesKHR : return "PipelineExecutablePropertiesKHR"; - case StructureType::ePipelineExecutableInfoKHR : return "PipelineExecutableInfoKHR"; - case StructureType::ePipelineExecutableStatisticKHR : return "PipelineExecutableStatisticKHR"; - case StructureType::ePipelineExecutableInternalRepresentationKHR : return "PipelineExecutableInternalRepresentationKHR"; - case 
StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT : return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT"; - case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV : return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV"; - case StructureType::eGraphicsShaderGroupCreateInfoNV : return "GraphicsShaderGroupCreateInfoNV"; - case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV : return "GraphicsPipelineShaderGroupsCreateInfoNV"; - case StructureType::eIndirectCommandsLayoutTokenNV : return "IndirectCommandsLayoutTokenNV"; - case StructureType::eIndirectCommandsLayoutCreateInfoNV : return "IndirectCommandsLayoutCreateInfoNV"; - case StructureType::eGeneratedCommandsInfoNV : return "GeneratedCommandsInfoNV"; - case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV : return "GeneratedCommandsMemoryRequirementsInfoNV"; - case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV : return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV"; - case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT : return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT"; - case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT : return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT"; - case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM : return "CommandBufferInheritanceRenderPassTransformInfoQCOM"; - case StructureType::eRenderPassTransformBeginInfoQCOM : return "RenderPassTransformBeginInfoQCOM"; - case StructureType::ePipelineLibraryCreateInfoKHR : return "PipelineLibraryCreateInfoKHR"; - case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT : return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT"; - case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV : return "PhysicalDeviceDiagnosticsConfigFeaturesNV"; - case StructureType::eDeviceDiagnosticsConfigCreateInfoNV : return "DeviceDiagnosticsConfigCreateInfoNV"; + case StructureType::eApplicationInfo: return "ApplicationInfo"; + case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo"; + case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo"; + case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo"; + case StructureType::eSubmitInfo: return "SubmitInfo"; + case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo"; + case StructureType::eMappedMemoryRange: return "MappedMemoryRange"; + case StructureType::eBindSparseInfo: return "BindSparseInfo"; + case StructureType::eFenceCreateInfo: return "FenceCreateInfo"; + case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo"; + case StructureType::eEventCreateInfo: return "EventCreateInfo"; + case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo"; + case StructureType::eBufferCreateInfo: return "BufferCreateInfo"; + case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo"; + case StructureType::eImageCreateInfo: return "ImageCreateInfo"; + case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo"; + case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo"; + case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo"; + case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo"; + case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo"; + case StructureType::ePipelineInputAssemblyStateCreateInfo: return 
"PipelineInputAssemblyStateCreateInfo"; + case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo"; + case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo"; + case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo"; + case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo"; + case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo"; + case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo"; + case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo"; + case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo"; + case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo"; + case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo"; + case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo"; + case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo"; + case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo"; + case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo"; + case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet"; + case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet"; + case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo"; + case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo"; + case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo"; + case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo"; + case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo"; + case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo"; + case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo"; + case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier"; + case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier"; + case StructureType::eMemoryBarrier: return "MemoryBarrier"; + case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo"; + case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo"; + case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties"; + case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo"; + case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo"; + case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures"; + case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements"; + case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo"; + case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo"; + case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo"; + case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo"; + case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo"; + case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo"; + case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo"; + case 
StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo"; + case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties"; + case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo"; + case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2"; + case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2"; + case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2"; + case StructureType::eMemoryRequirements2: return "MemoryRequirements2"; + case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2"; + case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2"; + case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2"; + case StructureType::eFormatProperties2: return "FormatProperties2"; + case StructureType::eImageFormatProperties2: return "ImageFormatProperties2"; + case StructureType::ePhysicalDeviceImageFormatInfo2: return "PhysicalDeviceImageFormatInfo2"; + case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2"; + case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2"; + case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2"; + case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2"; + case StructureType::ePhysicalDevicePointClippingProperties: return "PhysicalDevicePointClippingProperties"; + case StructureType::eRenderPassInputAttachmentAspectCreateInfo: + return "RenderPassInputAttachmentAspectCreateInfo"; + case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo"; + case StructureType::ePipelineTessellationDomainOriginStateCreateInfo: + return "PipelineTessellationDomainOriginStateCreateInfo"; + case StructureType::eRenderPassMultiviewCreateInfo: return "RenderPassMultiviewCreateInfo"; + case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures"; + case StructureType::ePhysicalDeviceMultiviewProperties: return "PhysicalDeviceMultiviewProperties"; + case StructureType::ePhysicalDeviceVariablePointersFeatures: return "PhysicalDeviceVariablePointersFeatures"; + case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo"; + case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures"; + case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties"; + case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2"; + case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo"; + case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo"; + case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo"; + case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo"; + case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures: + return "PhysicalDeviceSamplerYcbcrConversionFeatures"; + case StructureType::eSamplerYcbcrConversionImageFormatProperties: + return "SamplerYcbcrConversionImageFormatProperties"; + case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo"; + case StructureType::ePhysicalDeviceExternalImageFormatInfo: return 
"PhysicalDeviceExternalImageFormatInfo"; + case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties"; + case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo"; + case StructureType::eExternalBufferProperties: return "ExternalBufferProperties"; + case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties"; + case StructureType::eExternalMemoryBufferCreateInfo: return "ExternalMemoryBufferCreateInfo"; + case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo"; + case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo"; + case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo"; + case StructureType::eExternalFenceProperties: return "ExternalFenceProperties"; + case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo"; + case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo"; + case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo"; + case StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties"; + case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties"; + case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport"; + case StructureType::ePhysicalDeviceShaderDrawParametersFeatures: + return "PhysicalDeviceShaderDrawParametersFeatures"; + case StructureType::ePhysicalDeviceVulkan11Features: return "PhysicalDeviceVulkan11Features"; + case StructureType::ePhysicalDeviceVulkan11Properties: return "PhysicalDeviceVulkan11Properties"; + case StructureType::ePhysicalDeviceVulkan12Features: return "PhysicalDeviceVulkan12Features"; + case StructureType::ePhysicalDeviceVulkan12Properties: return "PhysicalDeviceVulkan12Properties"; + case StructureType::eImageFormatListCreateInfo: return "ImageFormatListCreateInfo"; + case StructureType::eAttachmentDescription2: return "AttachmentDescription2"; + case StructureType::eAttachmentReference2: return "AttachmentReference2"; + case StructureType::eSubpassDescription2: return "SubpassDescription2"; + case StructureType::eSubpassDependency2: return "SubpassDependency2"; + case StructureType::eRenderPassCreateInfo2: return "RenderPassCreateInfo2"; + case StructureType::eSubpassBeginInfo: return "SubpassBeginInfo"; + case StructureType::eSubpassEndInfo: return "SubpassEndInfo"; + case StructureType::ePhysicalDevice8BitStorageFeatures: return "PhysicalDevice8BitStorageFeatures"; + case StructureType::ePhysicalDeviceDriverProperties: return "PhysicalDeviceDriverProperties"; + case StructureType::ePhysicalDeviceShaderAtomicInt64Features: return "PhysicalDeviceShaderAtomicInt64Features"; + case StructureType::ePhysicalDeviceShaderFloat16Int8Features: return "PhysicalDeviceShaderFloat16Int8Features"; + case StructureType::ePhysicalDeviceFloatControlsProperties: return "PhysicalDeviceFloatControlsProperties"; + case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo: + return "DescriptorSetLayoutBindingFlagsCreateInfo"; + case StructureType::ePhysicalDeviceDescriptorIndexingFeatures: return "PhysicalDeviceDescriptorIndexingFeatures"; + case StructureType::ePhysicalDeviceDescriptorIndexingProperties: + return "PhysicalDeviceDescriptorIndexingProperties"; + case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo: + return 
"DescriptorSetVariableDescriptorCountAllocateInfo"; + case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport: + return "DescriptorSetVariableDescriptorCountLayoutSupport"; + case StructureType::ePhysicalDeviceDepthStencilResolveProperties: + return "PhysicalDeviceDepthStencilResolveProperties"; + case StructureType::eSubpassDescriptionDepthStencilResolve: return "SubpassDescriptionDepthStencilResolve"; + case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures: return "PhysicalDeviceScalarBlockLayoutFeatures"; + case StructureType::eImageStencilUsageCreateInfo: return "ImageStencilUsageCreateInfo"; + case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties: + return "PhysicalDeviceSamplerFilterMinmaxProperties"; + case StructureType::eSamplerReductionModeCreateInfo: return "SamplerReductionModeCreateInfo"; + case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures: return "PhysicalDeviceVulkanMemoryModelFeatures"; + case StructureType::ePhysicalDeviceImagelessFramebufferFeatures: + return "PhysicalDeviceImagelessFramebufferFeatures"; + case StructureType::eFramebufferAttachmentsCreateInfo: return "FramebufferAttachmentsCreateInfo"; + case StructureType::eFramebufferAttachmentImageInfo: return "FramebufferAttachmentImageInfo"; + case StructureType::eRenderPassAttachmentBeginInfo: return "RenderPassAttachmentBeginInfo"; + case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures: + return "PhysicalDeviceUniformBufferStandardLayoutFeatures"; + case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures: + return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures"; + case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures: + return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures"; + case StructureType::eAttachmentReferenceStencilLayout: return "AttachmentReferenceStencilLayout"; + case StructureType::eAttachmentDescriptionStencilLayout: return "AttachmentDescriptionStencilLayout"; + case StructureType::ePhysicalDeviceHostQueryResetFeatures: return "PhysicalDeviceHostQueryResetFeatures"; + case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures: return "PhysicalDeviceTimelineSemaphoreFeatures"; + case StructureType::ePhysicalDeviceTimelineSemaphoreProperties: + return "PhysicalDeviceTimelineSemaphoreProperties"; + case StructureType::eSemaphoreTypeCreateInfo: return "SemaphoreTypeCreateInfo"; + case StructureType::eTimelineSemaphoreSubmitInfo: return "TimelineSemaphoreSubmitInfo"; + case StructureType::eSemaphoreWaitInfo: return "SemaphoreWaitInfo"; + case StructureType::eSemaphoreSignalInfo: return "SemaphoreSignalInfo"; + case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures: + return "PhysicalDeviceBufferDeviceAddressFeatures"; + case StructureType::eBufferDeviceAddressInfo: return "BufferDeviceAddressInfo"; + case StructureType::eBufferOpaqueCaptureAddressCreateInfo: return "BufferOpaqueCaptureAddressCreateInfo"; + case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo: return "MemoryOpaqueCaptureAddressAllocateInfo"; + case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo: return "DeviceMemoryOpaqueCaptureAddressInfo"; + case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR"; + case StructureType::ePresentInfoKHR: return "PresentInfoKHR"; + case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR"; + case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR"; + case 
StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR"; + case StructureType::eAcquireNextImageInfoKHR: return "AcquireNextImageInfoKHR"; + case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR"; + case StructureType::eDeviceGroupSwapchainCreateInfoKHR: return "DeviceGroupSwapchainCreateInfoKHR"; + case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR"; + case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR"; + case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR"; + case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR"; + case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR"; + case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR"; + case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR"; + case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR"; + case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT"; + case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: + return "PipelineRasterizationStateRasterizationOrderAMD"; + case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT"; + case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT"; + case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT"; + case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV"; + case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV"; + case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV"; + case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT: + return "PhysicalDeviceTransformFeedbackFeaturesEXT"; + case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT: + return "PhysicalDeviceTransformFeedbackPropertiesEXT"; + case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT: + return "PipelineRasterizationStateStreamCreateInfoEXT"; + case StructureType::eImageViewHandleInfoNVX: return "ImageViewHandleInfoNVX"; + case StructureType::eImageViewAddressPropertiesNVX: return "ImageViewAddressPropertiesNVX"; + case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD"; + case StructureType::eStreamDescriptorSurfaceCreateInfoGGP: return "StreamDescriptorSurfaceCreateInfoGGP"; + case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV: + return "PhysicalDeviceCornerSampledImageFeaturesNV"; + case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV"; + case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV"; + case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV"; + case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV"; + case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT"; + case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN"; + case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT: + return "PhysicalDeviceTextureCompressionAstcHdrFeaturesEXT"; + case 
StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT"; + case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT"; + case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR"; + case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR"; + case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR"; + case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR"; + case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR"; + case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR"; + case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR"; + case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR"; + case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR"; + case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR"; + case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR"; + case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR"; + case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR"; + case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR: + return "PhysicalDevicePushDescriptorPropertiesKHR"; + case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT: + return "CommandBufferInheritanceConditionalRenderingInfoEXT"; + case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT: + return "PhysicalDeviceConditionalRenderingFeaturesEXT"; + case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT"; + case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR"; + case StructureType::ePipelineViewportWScalingStateCreateInfoNV: + return "PipelineViewportWScalingStateCreateInfoNV"; + case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT"; + case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT"; + case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT"; + case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT"; + case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT"; + case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE"; + case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: + return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; + case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV"; + case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: + return "PhysicalDeviceDiscardRectanglePropertiesEXT"; + case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: + return "PipelineDiscardRectangleStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT: + return "PhysicalDeviceConservativeRasterizationPropertiesEXT"; + case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT: + return "PipelineRasterizationConservativeStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT: return "PhysicalDeviceDepthClipEnableFeaturesEXT"; + case 
StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT: + return "PipelineRasterizationDepthClipStateCreateInfoEXT"; + case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT"; + case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR"; + case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR"; + case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR"; + case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR"; + case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR"; + case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR"; + case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR: + return "PhysicalDevicePerformanceQueryFeaturesKHR"; + case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR: + return "PhysicalDevicePerformanceQueryPropertiesKHR"; + case StructureType::eQueryPoolPerformanceCreateInfoKHR: return "QueryPoolPerformanceCreateInfoKHR"; + case StructureType::ePerformanceQuerySubmitInfoKHR: return "PerformanceQuerySubmitInfoKHR"; + case StructureType::eAcquireProfilingLockInfoKHR: return "AcquireProfilingLockInfoKHR"; + case StructureType::ePerformanceCounterKHR: return "PerformanceCounterKHR"; + case StructureType::ePerformanceCounterDescriptionKHR: return "PerformanceCounterDescriptionKHR"; + case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR"; + case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR"; + case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR"; + case StructureType::eDisplayProperties2KHR: return "DisplayProperties2KHR"; + case StructureType::eDisplayPlaneProperties2KHR: return "DisplayPlaneProperties2KHR"; + case StructureType::eDisplayModeProperties2KHR: return "DisplayModeProperties2KHR"; + case StructureType::eDisplayPlaneInfo2KHR: return "DisplayPlaneInfo2KHR"; + case StructureType::eDisplayPlaneCapabilities2KHR: return "DisplayPlaneCapabilities2KHR"; + case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK"; + case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK"; + case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT"; + case StructureType::eDebugUtilsObjectTagInfoEXT: return "DebugUtilsObjectTagInfoEXT"; + case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT"; + case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT"; + case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT"; + case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID"; + case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID"; + case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID: + return "AndroidHardwareBufferFormatPropertiesANDROID"; + case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID"; + case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID: + return "MemoryGetAndroidHardwareBufferInfoANDROID"; + case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID"; + case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT: + return "PhysicalDeviceInlineUniformBlockFeaturesEXT"; + case 
StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT: + return "PhysicalDeviceInlineUniformBlockPropertiesEXT"; + case StructureType::eWriteDescriptorSetInlineUniformBlockEXT: return "WriteDescriptorSetInlineUniformBlockEXT"; + case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT: + return "DescriptorPoolInlineUniformBlockCreateInfoEXT"; + case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT"; + case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT"; + case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: + return "PipelineSampleLocationsStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT: + return "PhysicalDeviceSampleLocationsPropertiesEXT"; + case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT: + return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT: + return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; + case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT: + return "PipelineColorBlendAdvancedStateCreateInfoEXT"; + case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV"; + case StructureType::eBindAccelerationStructureMemoryInfoKHR: return "BindAccelerationStructureMemoryInfoKHR"; + case StructureType::eWriteDescriptorSetAccelerationStructureKHR: + return "WriteDescriptorSetAccelerationStructureKHR"; + case StructureType::eAccelerationStructureBuildGeometryInfoKHR: + return "AccelerationStructureBuildGeometryInfoKHR"; + case StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR: + return "AccelerationStructureCreateGeometryTypeInfoKHR"; + case StructureType::eAccelerationStructureDeviceAddressInfoKHR: + return "AccelerationStructureDeviceAddressInfoKHR"; + case StructureType::eAccelerationStructureGeometryAabbsDataKHR: + return "AccelerationStructureGeometryAabbsDataKHR"; + case StructureType::eAccelerationStructureGeometryInstancesDataKHR: + return "AccelerationStructureGeometryInstancesDataKHR"; + case StructureType::eAccelerationStructureGeometryTrianglesDataKHR: + return "AccelerationStructureGeometryTrianglesDataKHR"; + case StructureType::eAccelerationStructureGeometryKHR: return "AccelerationStructureGeometryKHR"; + case StructureType::eAccelerationStructureMemoryRequirementsInfoKHR: + return "AccelerationStructureMemoryRequirementsInfoKHR"; + case StructureType::eAccelerationStructureVersionKHR: return "AccelerationStructureVersionKHR"; + case StructureType::eCopyAccelerationStructureInfoKHR: return "CopyAccelerationStructureInfoKHR"; + case StructureType::eCopyAccelerationStructureToMemoryInfoKHR: return "CopyAccelerationStructureToMemoryInfoKHR"; + case StructureType::eCopyMemoryToAccelerationStructureInfoKHR: return "CopyMemoryToAccelerationStructureInfoKHR"; + case StructureType::ePhysicalDeviceRayTracingFeaturesKHR: return "PhysicalDeviceRayTracingFeaturesKHR"; + case StructureType::ePhysicalDeviceRayTracingPropertiesKHR: return "PhysicalDeviceRayTracingPropertiesKHR"; + case StructureType::eRayTracingPipelineCreateInfoKHR: return "RayTracingPipelineCreateInfoKHR"; + case StructureType::eRayTracingShaderGroupCreateInfoKHR: return "RayTracingShaderGroupCreateInfoKHR"; + case StructureType::eAccelerationStructureCreateInfoKHR: return "AccelerationStructureCreateInfoKHR"; + 
case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR: return "RayTracingPipelineInterfaceCreateInfoKHR"; + case StructureType::ePipelineCoverageModulationStateCreateInfoNV: + return "PipelineCoverageModulationStateCreateInfoNV"; + case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV: return "PhysicalDeviceShaderSmBuiltinsFeaturesNV"; + case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV: + return "PhysicalDeviceShaderSmBuiltinsPropertiesNV"; + case StructureType::eDrmFormatModifierPropertiesListEXT: return "DrmFormatModifierPropertiesListEXT"; + case StructureType::eDrmFormatModifierPropertiesEXT: return "DrmFormatModifierPropertiesEXT"; + case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT: + return "PhysicalDeviceImageDrmFormatModifierInfoEXT"; + case StructureType::eImageDrmFormatModifierListCreateInfoEXT: return "ImageDrmFormatModifierListCreateInfoEXT"; + case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT: + return "ImageDrmFormatModifierExplicitCreateInfoEXT"; + case StructureType::eImageDrmFormatModifierPropertiesEXT: return "ImageDrmFormatModifierPropertiesEXT"; + case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT"; + case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT"; + case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV: + return "PipelineViewportShadingRateImageStateCreateInfoNV"; + case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV: return "PhysicalDeviceShadingRateImageFeaturesNV"; + case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV: + return "PhysicalDeviceShadingRateImagePropertiesNV"; + case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV: + return "PipelineViewportCoarseSampleOrderStateCreateInfoNV"; + case StructureType::eRayTracingPipelineCreateInfoNV: return "RayTracingPipelineCreateInfoNV"; + case StructureType::eAccelerationStructureCreateInfoNV: return "AccelerationStructureCreateInfoNV"; + case StructureType::eGeometryNV: return "GeometryNV"; + case StructureType::eGeometryTrianglesNV: return "GeometryTrianglesNV"; + case StructureType::eGeometryAabbNV: return "GeometryAabbNV"; + case StructureType::eAccelerationStructureMemoryRequirementsInfoNV: + return "AccelerationStructureMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceRayTracingPropertiesNV: return "PhysicalDeviceRayTracingPropertiesNV"; + case StructureType::eRayTracingShaderGroupCreateInfoNV: return "RayTracingShaderGroupCreateInfoNV"; + case StructureType::eAccelerationStructureInfoNV: return "AccelerationStructureInfoNV"; + case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV: + return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV"; + case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV: + return "PipelineRepresentativeFragmentTestStateCreateInfoNV"; + case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT: + return "PhysicalDeviceImageViewImageFormatInfoEXT"; + case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT: + return "FilterCubicImageViewImageFormatPropertiesEXT"; + case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT: return "DeviceQueueGlobalPriorityCreateInfoEXT"; + case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT"; + case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT"; + case 
StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT: + return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; + case StructureType::ePhysicalDeviceShaderClockFeaturesKHR: return "PhysicalDeviceShaderClockFeaturesKHR"; + case StructureType::ePipelineCompilerControlCreateInfoAMD: return "PipelineCompilerControlCreateInfoAMD"; + case StructureType::eCalibratedTimestampInfoEXT: return "CalibratedTimestampInfoEXT"; + case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD"; + case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: + return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; + case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT: + return "PipelineVertexInputDivisorStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT: + return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT"; + case StructureType::ePresentFrameTokenGGP: return "PresentFrameTokenGGP"; + case StructureType::ePipelineCreationFeedbackCreateInfoEXT: return "PipelineCreationFeedbackCreateInfoEXT"; + case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV: + return "PhysicalDeviceComputeShaderDerivativesFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV"; + case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV: + return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV"; + case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV: + return "PhysicalDeviceShaderImageFootprintFeaturesNV"; + case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV: + return "PipelineViewportExclusiveScissorStateCreateInfoNV"; + case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV"; + case StructureType::eCheckpointDataNV: return "CheckpointDataNV"; + case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV"; + case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL: + return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; + case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL: return "QueryPoolPerformanceQueryCreateInfoINTEL"; + case StructureType::eInitializePerformanceApiInfoINTEL: return "InitializePerformanceApiInfoINTEL"; + case StructureType::ePerformanceMarkerInfoINTEL: return "PerformanceMarkerInfoINTEL"; + case StructureType::ePerformanceStreamMarkerInfoINTEL: return "PerformanceStreamMarkerInfoINTEL"; + case StructureType::ePerformanceOverrideInfoINTEL: return "PerformanceOverrideInfoINTEL"; + case StructureType::ePerformanceConfigurationAcquireInfoINTEL: return "PerformanceConfigurationAcquireInfoINTEL"; + case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT: return "PhysicalDevicePciBusInfoPropertiesEXT"; + case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD: return "DisplayNativeHdrSurfaceCapabilitiesAMD"; + case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD: return "SwapchainDisplayNativeHdrCreateInfoAMD"; + case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA: return "ImagepipeSurfaceCreateInfoFUCHSIA"; + case StructureType::eMetalSurfaceCreateInfoEXT: return "MetalSurfaceCreateInfoEXT"; + case 
StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT: + return "PhysicalDeviceFragmentDensityMapFeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT: + return "PhysicalDeviceFragmentDensityMapPropertiesEXT"; + case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT: + return "RenderPassFragmentDensityMapCreateInfoEXT"; + case StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT: + return "PhysicalDeviceSubgroupSizeControlPropertiesEXT"; + case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT: + return "PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT"; + case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT: + return "PhysicalDeviceSubgroupSizeControlFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD"; + case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD"; + case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT: return "PhysicalDeviceMemoryBudgetPropertiesEXT"; + case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT: return "PhysicalDeviceMemoryPriorityFeaturesEXT"; + case StructureType::eMemoryPriorityAllocateInfoEXT: return "MemoryPriorityAllocateInfoEXT"; + case StructureType::eSurfaceProtectedCapabilitiesKHR: return "SurfaceProtectedCapabilitiesKHR"; + case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV: + return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV"; + case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT: + return "PhysicalDeviceBufferDeviceAddressFeaturesEXT"; + case StructureType::eBufferDeviceAddressCreateInfoEXT: return "BufferDeviceAddressCreateInfoEXT"; + case StructureType::ePhysicalDeviceToolPropertiesEXT: return "PhysicalDeviceToolPropertiesEXT"; + case StructureType::eValidationFeaturesEXT: return "ValidationFeaturesEXT"; + case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV: + return "PhysicalDeviceCooperativeMatrixFeaturesNV"; + case StructureType::eCooperativeMatrixPropertiesNV: return "CooperativeMatrixPropertiesNV"; + case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV: + return "PhysicalDeviceCooperativeMatrixPropertiesNV"; + case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV: + return "PhysicalDeviceCoverageReductionModeFeaturesNV"; + case StructureType::ePipelineCoverageReductionStateCreateInfoNV: + return "PipelineCoverageReductionStateCreateInfoNV"; + case StructureType::eFramebufferMixedSamplesCombinationNV: return "FramebufferMixedSamplesCombinationNV"; + case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT: + return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT"; + case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT: + return "PhysicalDeviceYcbcrImageArraysFeaturesEXT"; + case StructureType::eSurfaceFullScreenExclusiveInfoEXT: return "SurfaceFullScreenExclusiveInfoEXT"; + case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT: + return "SurfaceCapabilitiesFullScreenExclusiveEXT"; + case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT: return "SurfaceFullScreenExclusiveWin32InfoEXT"; + case StructureType::eHeadlessSurfaceCreateInfoEXT: return "HeadlessSurfaceCreateInfoEXT"; + case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT: + return "PhysicalDeviceLineRasterizationFeaturesEXT"; + case StructureType::ePipelineRasterizationLineStateCreateInfoEXT: + return 
"PipelineRasterizationLineStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT: + return "PhysicalDeviceLineRasterizationPropertiesEXT"; + case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT: return "PhysicalDeviceIndexTypeUint8FeaturesEXT"; + case StructureType::eDeferredOperationInfoKHR: return "DeferredOperationInfoKHR"; + case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR: + return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR"; + case StructureType::ePipelineInfoKHR: return "PipelineInfoKHR"; + case StructureType::ePipelineExecutablePropertiesKHR: return "PipelineExecutablePropertiesKHR"; + case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR"; + case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR"; + case StructureType::ePipelineExecutableInternalRepresentationKHR: + return "PipelineExecutableInternalRepresentationKHR"; + case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT: + return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV: + return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV"; + case StructureType::eGraphicsShaderGroupCreateInfoNV: return "GraphicsShaderGroupCreateInfoNV"; + case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV: return "GraphicsPipelineShaderGroupsCreateInfoNV"; + case StructureType::eIndirectCommandsLayoutTokenNV: return "IndirectCommandsLayoutTokenNV"; + case StructureType::eIndirectCommandsLayoutCreateInfoNV: return "IndirectCommandsLayoutCreateInfoNV"; + case StructureType::eGeneratedCommandsInfoNV: return "GeneratedCommandsInfoNV"; + case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV: + return "GeneratedCommandsMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV: + return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV"; + case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT: + return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT"; + case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT: + return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT"; + case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM: + return "CommandBufferInheritanceRenderPassTransformInfoQCOM"; + case StructureType::eRenderPassTransformBeginInfoQCOM: return "RenderPassTransformBeginInfoQCOM"; + case StructureType::ePipelineLibraryCreateInfoKHR: return "PipelineLibraryCreateInfoKHR"; + case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT: + return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT"; + case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV: + return "PhysicalDeviceDiagnosticsConfigFeaturesNV"; + case StructureType::eDeviceDiagnosticsConfigCreateInfoNV: return "DeviceDiagnosticsConfigCreateInfoNV"; default: return "invalid"; } } enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags { - eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT, - eVote = VK_SUBGROUP_FEATURE_VOTE_BIT, - eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT, - eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT, - eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT, + eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT, + eVote = VK_SUBGROUP_FEATURE_VOTE_BIT, + eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT, + eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT, + eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT, eShuffleRelative = 
-    eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
-    eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
-    ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
+    eClustered       = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
+    eQuad            = VK_SUBGROUP_FEATURE_QUAD_BIT,
+    ePartitionedNV   = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
   };
 
   VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value )
   {
     switch ( value )
     {
-      case SubgroupFeatureFlagBits::eBasic : return "Basic";
-      case SubgroupFeatureFlagBits::eVote : return "Vote";
-      case SubgroupFeatureFlagBits::eArithmetic : return "Arithmetic";
-      case SubgroupFeatureFlagBits::eBallot : return "Ballot";
-      case SubgroupFeatureFlagBits::eShuffle : return "Shuffle";
-      case SubgroupFeatureFlagBits::eShuffleRelative : return "ShuffleRelative";
-      case SubgroupFeatureFlagBits::eClustered : return "Clustered";
-      case SubgroupFeatureFlagBits::eQuad : return "Quad";
-      case SubgroupFeatureFlagBits::ePartitionedNV : return "PartitionedNV";
+      case SubgroupFeatureFlagBits::eBasic: return "Basic";
+      case SubgroupFeatureFlagBits::eVote: return "Vote";
+      case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic";
+      case SubgroupFeatureFlagBits::eBallot: return "Ballot";
+      case SubgroupFeatureFlagBits::eShuffle: return "Shuffle";
+      case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative";
+      case SubgroupFeatureFlagBits::eClustered: return "Clustered";
+      case SubgroupFeatureFlagBits::eQuad: return "Quad";
+      case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV";
       default: return "invalid";
     }
   }
 
   enum class SubpassContents
   {
-    eInline = VK_SUBPASS_CONTENTS_INLINE,
+    eInline                  = VK_SUBPASS_CONTENTS_INLINE,
     eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
   };
 
@@ -8555,15 +10059,15 @@ namespace VULKAN_HPP_NAMESPACE
   {
     switch ( value )
     {
-      case SubpassContents::eInline : return "Inline";
-      case SubpassContents::eSecondaryCommandBuffers : return "SecondaryCommandBuffers";
+      case SubpassContents::eInline: return "Inline";
+      case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers";
       default: return "invalid";
     }
   }
 
   enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags
   {
-    ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
+    ePerViewAttributesNVX    = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
     ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX
   };
 
@@ -8571,8 +10075,8 @@ namespace VULKAN_HPP_NAMESPACE
   {
     switch ( value )
     {
-      case SubpassDescriptionFlagBits::ePerViewAttributesNVX : return "PerViewAttributesNVX";
-      case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX : return "PerViewPositionXOnlyNVX";
+      case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX";
+      case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX";
       default: return "invalid";
     }
   }
@@ -8586,37 +10090,37 @@ namespace VULKAN_HPP_NAMESPACE
   {
     switch ( value )
    {
-      case SurfaceCounterFlagBitsEXT::eVblank : return "Vblank";
+      case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank";
       default: return "invalid";
     }
   }
 
   enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR
   {
-    eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
-    eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
-    eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
-    eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
-    eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
-
eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR, + eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, + eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, + eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR, + eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR, + eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR, + eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR, eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR, eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR, - eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR + eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR }; VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value ) { switch ( value ) { - case SurfaceTransformFlagBitsKHR::eIdentity : return "Identity"; - case SurfaceTransformFlagBitsKHR::eRotate90 : return "Rotate90"; - case SurfaceTransformFlagBitsKHR::eRotate180 : return "Rotate180"; - case SurfaceTransformFlagBitsKHR::eRotate270 : return "Rotate270"; - case SurfaceTransformFlagBitsKHR::eHorizontalMirror : return "HorizontalMirror"; - case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 : return "HorizontalMirrorRotate90"; - case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 : return "HorizontalMirrorRotate180"; - case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 : return "HorizontalMirrorRotate270"; - case SurfaceTransformFlagBitsKHR::eInherit : return "Inherit"; + case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity"; + case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90"; + case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180"; + case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270"; + case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit"; default: return "invalid"; } } @@ -8624,27 +10128,27 @@ namespace VULKAN_HPP_NAMESPACE enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR { eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, - eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, - eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR + eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, + eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR }; VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value ) { switch ( value ) { - case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions : return "SplitInstanceBindRegions"; - case SwapchainCreateFlagBitsKHR::eProtected : return "Protected"; - case SwapchainCreateFlagBitsKHR::eMutableFormat : return "MutableFormat"; + case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case SwapchainCreateFlagBitsKHR::eProtected: return "Protected"; + case SwapchainCreateFlagBitsKHR::eMutableFormat: return "MutableFormat"; default: return "invalid"; } } enum class SystemAllocationScope { - eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, - eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, - eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE, - eDevice = 
VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, + eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, + eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, + eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE, + eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE }; @@ -8652,11 +10156,11 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SystemAllocationScope::eCommand : return "Command"; - case SystemAllocationScope::eObject : return "Object"; - case SystemAllocationScope::eCache : return "Cache"; - case SystemAllocationScope::eDevice : return "Device"; - case SystemAllocationScope::eInstance : return "Instance"; + case SystemAllocationScope::eCommand: return "Command"; + case SystemAllocationScope::eObject: return "Object"; + case SystemAllocationScope::eCache: return "Cache"; + case SystemAllocationScope::eDevice: return "Device"; + case SystemAllocationScope::eInstance: return "Instance"; default: return "invalid"; } } @@ -8672,17 +10176,17 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case TessellationDomainOrigin::eUpperLeft : return "UpperLeft"; - case TessellationDomainOrigin::eLowerLeft : return "LowerLeft"; + case TessellationDomainOrigin::eUpperLeft: return "UpperLeft"; + case TessellationDomainOrigin::eLowerLeft: return "LowerLeft"; default: return "invalid"; } } enum class TimeDomainEXT { - eDevice = VK_TIME_DOMAIN_DEVICE_EXT, - eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT, - eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT, + eDevice = VK_TIME_DOMAIN_DEVICE_EXT, + eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT, + eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT, eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT }; @@ -8690,36 +10194,36 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case TimeDomainEXT::eDevice : return "Device"; - case TimeDomainEXT::eClockMonotonic : return "ClockMonotonic"; - case TimeDomainEXT::eClockMonotonicRaw : return "ClockMonotonicRaw"; - case TimeDomainEXT::eQueryPerformanceCounter : return "QueryPerformanceCounter"; + case TimeDomainEXT::eDevice: return "Device"; + case TimeDomainEXT::eClockMonotonic: return "ClockMonotonic"; + case TimeDomainEXT::eClockMonotonicRaw: return "ClockMonotonicRaw"; + case TimeDomainEXT::eQueryPerformanceCounter: return "QueryPerformanceCounter"; default: return "invalid"; } } enum class ToolPurposeFlagBitsEXT : VkToolPurposeFlagsEXT { - eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT, - eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT_EXT, - eTracing = VK_TOOL_PURPOSE_TRACING_BIT_EXT, + eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT, + eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT_EXT, + eTracing = VK_TOOL_PURPOSE_TRACING_BIT_EXT, eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT, - eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT, - eDebugReporting = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT, - eDebugMarkers = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT + eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT, + eDebugReporting = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT, + eDebugMarkers = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT }; VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBitsEXT value ) { switch ( value ) { - case ToolPurposeFlagBitsEXT::eValidation : return "Validation"; - case ToolPurposeFlagBitsEXT::eProfiling : return "Profiling"; - case ToolPurposeFlagBitsEXT::eTracing : return "Tracing"; - case ToolPurposeFlagBitsEXT::eAdditionalFeatures : return "AdditionalFeatures"; - case 
ToolPurposeFlagBitsEXT::eModifyingFeatures : return "ModifyingFeatures"; - case ToolPurposeFlagBitsEXT::eDebugReporting : return "DebugReporting"; - case ToolPurposeFlagBitsEXT::eDebugMarkers : return "DebugMarkers"; + case ToolPurposeFlagBitsEXT::eValidation: return "Validation"; + case ToolPurposeFlagBitsEXT::eProfiling: return "Profiling"; + case ToolPurposeFlagBitsEXT::eTracing: return "Tracing"; + case ToolPurposeFlagBitsEXT::eAdditionalFeatures: return "AdditionalFeatures"; + case ToolPurposeFlagBitsEXT::eModifyingFeatures: return "ModifyingFeatures"; + case ToolPurposeFlagBitsEXT::eDebugReporting: return "DebugReporting"; + case ToolPurposeFlagBitsEXT::eDebugMarkers: return "DebugMarkers"; default: return "invalid"; } } @@ -8733,14 +10237,14 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ValidationCacheHeaderVersionEXT::eOne : return "One"; + case ValidationCacheHeaderVersionEXT::eOne: return "One"; default: return "invalid"; } } enum class ValidationCheckEXT { - eAll = VK_VALIDATION_CHECK_ALL_EXT, + eAll = VK_VALIDATION_CHECK_ALL_EXT, eShaders = VK_VALIDATION_CHECK_SHADERS_EXT }; @@ -8748,63 +10252,63 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ValidationCheckEXT::eAll : return "All"; - case ValidationCheckEXT::eShaders : return "Shaders"; + case ValidationCheckEXT::eAll: return "All"; + case ValidationCheckEXT::eShaders: return "Shaders"; default: return "invalid"; } } enum class ValidationFeatureDisableEXT { - eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT, - eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT, - eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT, - eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT, + eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT, + eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT, + eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT, + eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT, eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT, - eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT, - eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT + eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT, + eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT }; VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value ) { switch ( value ) { - case ValidationFeatureDisableEXT::eAll : return "All"; - case ValidationFeatureDisableEXT::eShaders : return "Shaders"; - case ValidationFeatureDisableEXT::eThreadSafety : return "ThreadSafety"; - case ValidationFeatureDisableEXT::eApiParameters : return "ApiParameters"; - case ValidationFeatureDisableEXT::eObjectLifetimes : return "ObjectLifetimes"; - case ValidationFeatureDisableEXT::eCoreChecks : return "CoreChecks"; - case ValidationFeatureDisableEXT::eUniqueHandles : return "UniqueHandles"; + case ValidationFeatureDisableEXT::eAll: return "All"; + case ValidationFeatureDisableEXT::eShaders: return "Shaders"; + case ValidationFeatureDisableEXT::eThreadSafety: return "ThreadSafety"; + case ValidationFeatureDisableEXT::eApiParameters: return "ApiParameters"; + case ValidationFeatureDisableEXT::eObjectLifetimes: return "ObjectLifetimes"; + case ValidationFeatureDisableEXT::eCoreChecks: return "CoreChecks"; + case ValidationFeatureDisableEXT::eUniqueHandles: return "UniqueHandles"; default: return "invalid"; } } enum class ValidationFeatureEnableEXT { - eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, + eGpuAssisted = 
VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT, - eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, - eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT + eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, + eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT }; VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value ) { switch ( value ) { - case ValidationFeatureEnableEXT::eGpuAssisted : return "GpuAssisted"; - case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot : return "GpuAssistedReserveBindingSlot"; - case ValidationFeatureEnableEXT::eBestPractices : return "BestPractices"; - case ValidationFeatureEnableEXT::eDebugPrintf : return "DebugPrintf"; + case ValidationFeatureEnableEXT::eGpuAssisted: return "GpuAssisted"; + case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot: return "GpuAssistedReserveBindingSlot"; + case ValidationFeatureEnableEXT::eBestPractices: return "BestPractices"; + case ValidationFeatureEnableEXT::eDebugPrintf: return "DebugPrintf"; default: return "invalid"; } } enum class VendorId { - eVIV = VK_VENDOR_ID_VIV, - eVSI = VK_VENDOR_ID_VSI, - eKazan = VK_VENDOR_ID_KAZAN, + eVIV = VK_VENDOR_ID_VIV, + eVSI = VK_VENDOR_ID_VSI, + eKazan = VK_VENDOR_ID_KAZAN, eCodeplay = VK_VENDOR_ID_CODEPLAY }; @@ -8812,17 +10316,17 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case VendorId::eVIV : return "VIV"; - case VendorId::eVSI : return "VSI"; - case VendorId::eKazan : return "Kazan"; - case VendorId::eCodeplay : return "Codeplay"; + case VendorId::eVIV: return "VIV"; + case VendorId::eVSI: return "VSI"; + case VendorId::eKazan: return "Kazan"; + case VendorId::eCodeplay: return "Codeplay"; default: return "invalid"; } } enum class VertexInputRate { - eVertex = VK_VERTEX_INPUT_RATE_VERTEX, + eVertex = VK_VERTEX_INPUT_RATE_VERTEX, eInstance = VK_VERTEX_INPUT_RATE_INSTANCE }; @@ -8830,8 +10334,8 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case VertexInputRate::eVertex : return "Vertex"; - case VertexInputRate::eInstance : return "Instance"; + case VertexInputRate::eVertex: return "Vertex"; + case VertexInputRate::eInstance: return "Instance"; default: return "invalid"; } } @@ -8852,44 +10356,64 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case ViewportCoordinateSwizzleNV::ePositiveX : return "PositiveX"; - case ViewportCoordinateSwizzleNV::eNegativeX : return "NegativeX"; - case ViewportCoordinateSwizzleNV::ePositiveY : return "PositiveY"; - case ViewportCoordinateSwizzleNV::eNegativeY : return "NegativeY"; - case ViewportCoordinateSwizzleNV::ePositiveZ : return "PositiveZ"; - case ViewportCoordinateSwizzleNV::eNegativeZ : return "NegativeZ"; - case ViewportCoordinateSwizzleNV::ePositiveW : return "PositiveW"; - case ViewportCoordinateSwizzleNV::eNegativeW : return "NegativeW"; + case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX"; + case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX"; + case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY"; + case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY"; + case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ"; + case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ"; + case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW"; + case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW"; default: return "invalid"; } } - template + 
template struct cpp_type - { - }; + {}; using AccessFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eAccelerationStructureReadKHR) | VkFlags(AccessFlagBits::eAccelerationStructureWriteKHR) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) | VkFlags(AccessFlagBits::eCommandPreprocessReadNV) | VkFlags(AccessFlagBits::eCommandPreprocessWriteNV) + allFlags = + VkFlags( AccessFlagBits::eIndirectCommandRead ) | VkFlags( AccessFlagBits::eIndexRead ) | + VkFlags( AccessFlagBits::eVertexAttributeRead ) | VkFlags( AccessFlagBits::eUniformRead ) | + VkFlags( AccessFlagBits::eInputAttachmentRead ) | VkFlags( AccessFlagBits::eShaderRead ) | + VkFlags( AccessFlagBits::eShaderWrite ) | VkFlags( AccessFlagBits::eColorAttachmentRead ) | + VkFlags( AccessFlagBits::eColorAttachmentWrite ) | VkFlags( AccessFlagBits::eDepthStencilAttachmentRead ) | + VkFlags( AccessFlagBits::eDepthStencilAttachmentWrite ) | VkFlags( AccessFlagBits::eTransferRead ) | + VkFlags( AccessFlagBits::eTransferWrite ) | VkFlags( AccessFlagBits::eHostRead ) | + VkFlags( AccessFlagBits::eHostWrite ) | VkFlags( AccessFlagBits::eMemoryRead ) | + VkFlags( AccessFlagBits::eMemoryWrite ) | VkFlags( AccessFlagBits::eTransformFeedbackWriteEXT ) | + VkFlags( AccessFlagBits::eTransformFeedbackCounterReadEXT ) | + VkFlags( AccessFlagBits::eTransformFeedbackCounterWriteEXT ) | + VkFlags( AccessFlagBits::eConditionalRenderingReadEXT ) | + VkFlags( AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) | + VkFlags( AccessFlagBits::eAccelerationStructureReadKHR ) | + VkFlags( AccessFlagBits::eAccelerationStructureWriteKHR ) | VkFlags( AccessFlagBits::eShadingRateImageReadNV ) | + VkFlags( AccessFlagBits::eFragmentDensityMapReadEXT ) | VkFlags( AccessFlagBits::eCommandPreprocessReadNV ) | + VkFlags( AccessFlagBits::eCommandPreprocessWriteNV ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator|( AccessFlagBits bit0, + AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return AccessFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&( AccessFlagBits bit0, + AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { 
return AccessFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0, + AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return AccessFlags( bit0 ) ^ bit1; } @@ -8899,52 +10423,82 @@ namespace VULKAN_HPP_NAMESPACE return ~( AccessFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( AccessFlags value ) + VULKAN_HPP_INLINE std::string to_string( AccessFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & AccessFlagBits::eIndirectCommandRead ) result += "IndirectCommandRead | "; - if ( value & AccessFlagBits::eIndexRead ) result += "IndexRead | "; - if ( value & AccessFlagBits::eVertexAttributeRead ) result += "VertexAttributeRead | "; - if ( value & AccessFlagBits::eUniformRead ) result += "UniformRead | "; - if ( value & AccessFlagBits::eInputAttachmentRead ) result += "InputAttachmentRead | "; - if ( value & AccessFlagBits::eShaderRead ) result += "ShaderRead | "; - if ( value & AccessFlagBits::eShaderWrite ) result += "ShaderWrite | "; - if ( value & AccessFlagBits::eColorAttachmentRead ) result += "ColorAttachmentRead | "; - if ( value & AccessFlagBits::eColorAttachmentWrite ) result += "ColorAttachmentWrite | "; - if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) result += "DepthStencilAttachmentRead | "; - if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) result += "DepthStencilAttachmentWrite | "; - if ( value & AccessFlagBits::eTransferRead ) result += "TransferRead | "; - if ( value & AccessFlagBits::eTransferWrite ) result += "TransferWrite | "; - if ( value & AccessFlagBits::eHostRead ) result += "HostRead | "; - if ( value & AccessFlagBits::eHostWrite ) result += "HostWrite | "; - if ( value & AccessFlagBits::eMemoryRead ) result += "MemoryRead | "; - if ( value & AccessFlagBits::eMemoryWrite ) result += "MemoryWrite | "; - if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) result += "TransformFeedbackWriteEXT | "; - if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) result += "TransformFeedbackCounterReadEXT | "; - if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) result += "TransformFeedbackCounterWriteEXT | "; - if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) result += "ConditionalRenderingReadEXT | "; - if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | "; - if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) result += "AccelerationStructureReadKHR | "; - if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) result += "AccelerationStructureWriteKHR | "; - if ( value & AccessFlagBits::eShadingRateImageReadNV ) result += "ShadingRateImageReadNV | "; - if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | "; - if ( value & AccessFlagBits::eCommandPreprocessReadNV ) result += "CommandPreprocessReadNV | "; - if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) result += "CommandPreprocessWriteNV | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & AccessFlagBits::eIndirectCommandRead ) + result += "IndirectCommandRead | "; + if ( value & AccessFlagBits::eIndexRead ) + result += "IndexRead | "; + if ( value & AccessFlagBits::eVertexAttributeRead ) + result += "VertexAttributeRead | "; + if ( value & AccessFlagBits::eUniformRead ) + 
result += "UniformRead | "; + if ( value & AccessFlagBits::eInputAttachmentRead ) + result += "InputAttachmentRead | "; + if ( value & AccessFlagBits::eShaderRead ) + result += "ShaderRead | "; + if ( value & AccessFlagBits::eShaderWrite ) + result += "ShaderWrite | "; + if ( value & AccessFlagBits::eColorAttachmentRead ) + result += "ColorAttachmentRead | "; + if ( value & AccessFlagBits::eColorAttachmentWrite ) + result += "ColorAttachmentWrite | "; + if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) + result += "DepthStencilAttachmentRead | "; + if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) + result += "DepthStencilAttachmentWrite | "; + if ( value & AccessFlagBits::eTransferRead ) + result += "TransferRead | "; + if ( value & AccessFlagBits::eTransferWrite ) + result += "TransferWrite | "; + if ( value & AccessFlagBits::eHostRead ) + result += "HostRead | "; + if ( value & AccessFlagBits::eHostWrite ) + result += "HostWrite | "; + if ( value & AccessFlagBits::eMemoryRead ) + result += "MemoryRead | "; + if ( value & AccessFlagBits::eMemoryWrite ) + result += "MemoryWrite | "; + if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) + result += "TransformFeedbackWriteEXT | "; + if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) + result += "TransformFeedbackCounterReadEXT | "; + if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) + result += "TransformFeedbackCounterWriteEXT | "; + if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) + result += "ConditionalRenderingReadEXT | "; + if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) + result += "ColorAttachmentReadNoncoherentEXT | "; + if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) + result += "AccelerationStructureReadKHR | "; + if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) + result += "AccelerationStructureWriteKHR | "; + if ( value & AccessFlagBits::eShadingRateImageReadNV ) + result += "ShadingRateImageReadNV | "; + if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) + result += "FragmentDensityMapReadEXT | "; + if ( value & AccessFlagBits::eCommandPreprocessReadNV ) + result += "CommandPreprocessReadNV | "; + if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) + result += "CommandPreprocessWriteNV | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using AcquireProfilingLockFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR ) { return "{}"; } #ifdef VK_USE_PLATFORM_ANDROID_KHR enum class AndroidSurfaceCreateFlagBitsKHR : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR ) { @@ -8953,7 +10507,7 @@ namespace VULKAN_HPP_NAMESPACE using AndroidSurfaceCreateFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR ) { return "{}"; } @@ -8961,64 +10515,77 @@ namespace VULKAN_HPP_NAMESPACE using AttachmentDescriptionFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias) + allFlags = VkFlags( AttachmentDescriptionFlagBits::eMayAlias ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags + operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return AttachmentDescriptionFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator&( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags + operator&( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return AttachmentDescriptionFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator^( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags + operator^( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return AttachmentDescriptionFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( AttachmentDescriptionFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value ) + VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & AttachmentDescriptionFlagBits::eMayAlias ) result += "MayAlias | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & AttachmentDescriptionFlagBits::eMayAlias ) + result += "MayAlias | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using BufferCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected) | VkFlags(BufferCreateFlagBits::eDeviceAddressCaptureReplay) + allFlags = VkFlags( BufferCreateFlagBits::eSparseBinding ) | VkFlags( BufferCreateFlagBits::eSparseResidency ) | + VkFlags( BufferCreateFlagBits::eSparseAliased ) | VkFlags( BufferCreateFlagBits::eProtected ) | + VkFlags( BufferCreateFlagBits::eDeviceAddressCaptureReplay ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0, + BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return BufferCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&( BufferCreateFlagBits bit0, + BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return BufferCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0, + BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return BufferCreateFlags( bit0 ) ^ bit1; } @@ -9028,40 
+10595,58 @@ namespace VULKAN_HPP_NAMESPACE return ~( BufferCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & BufferCreateFlagBits::eSparseBinding ) result += "SparseBinding | "; - if ( value & BufferCreateFlagBits::eSparseResidency ) result += "SparseResidency | "; - if ( value & BufferCreateFlagBits::eSparseAliased ) result += "SparseAliased | "; - if ( value & BufferCreateFlagBits::eProtected ) result += "Protected | "; - if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & BufferCreateFlagBits::eSparseBinding ) + result += "SparseBinding | "; + if ( value & BufferCreateFlagBits::eSparseResidency ) + result += "SparseResidency | "; + if ( value & BufferCreateFlagBits::eSparseAliased ) + result += "SparseAliased | "; + if ( value & BufferCreateFlagBits::eProtected ) + result += "Protected | "; + if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using BufferUsageFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddress) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingKHR) + allFlags = + VkFlags( BufferUsageFlagBits::eTransferSrc ) | VkFlags( BufferUsageFlagBits::eTransferDst ) | + VkFlags( BufferUsageFlagBits::eUniformTexelBuffer ) | VkFlags( BufferUsageFlagBits::eStorageTexelBuffer ) | + VkFlags( BufferUsageFlagBits::eUniformBuffer ) | VkFlags( BufferUsageFlagBits::eStorageBuffer ) | + VkFlags( BufferUsageFlagBits::eIndexBuffer ) | VkFlags( BufferUsageFlagBits::eVertexBuffer ) | + VkFlags( BufferUsageFlagBits::eIndirectBuffer ) | VkFlags( BufferUsageFlagBits::eShaderDeviceAddress ) | + VkFlags( BufferUsageFlagBits::eTransformFeedbackBufferEXT ) | + VkFlags( BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) | + VkFlags( BufferUsageFlagBits::eConditionalRenderingEXT ) | VkFlags( BufferUsageFlagBits::eRayTracingKHR ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0, + BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return BufferUsageFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&( BufferUsageFlagBits bit0, + BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { 
return BufferUsageFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( BufferUsageFlagBits bit0, + BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return BufferUsageFlags( bit0 ) ^ bit1; } @@ -9071,284 +10656,362 @@ namespace VULKAN_HPP_NAMESPACE return ~( BufferUsageFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value ) + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & BufferUsageFlagBits::eTransferSrc ) result += "TransferSrc | "; - if ( value & BufferUsageFlagBits::eTransferDst ) result += "TransferDst | "; - if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | "; - if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | "; - if ( value & BufferUsageFlagBits::eUniformBuffer ) result += "UniformBuffer | "; - if ( value & BufferUsageFlagBits::eStorageBuffer ) result += "StorageBuffer | "; - if ( value & BufferUsageFlagBits::eIndexBuffer ) result += "IndexBuffer | "; - if ( value & BufferUsageFlagBits::eVertexBuffer ) result += "VertexBuffer | "; - if ( value & BufferUsageFlagBits::eIndirectBuffer ) result += "IndirectBuffer | "; - if ( value & BufferUsageFlagBits::eShaderDeviceAddress ) result += "ShaderDeviceAddress | "; - if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += "TransformFeedbackBufferEXT | "; - if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | "; - if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; - if ( value & BufferUsageFlagBits::eRayTracingKHR ) result += "RayTracingKHR | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & BufferUsageFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & BufferUsageFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & BufferUsageFlagBits::eUniformBuffer ) + result += "UniformBuffer | "; + if ( value & BufferUsageFlagBits::eStorageBuffer ) + result += "StorageBuffer | "; + if ( value & BufferUsageFlagBits::eIndexBuffer ) + result += "IndexBuffer | "; + if ( value & BufferUsageFlagBits::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & BufferUsageFlagBits::eIndirectBuffer ) + result += "IndirectBuffer | "; + if ( value & BufferUsageFlagBits::eShaderDeviceAddress ) + result += "ShaderDeviceAddress | "; + if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) + result += "TransformFeedbackBufferEXT | "; + if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) + result += "TransformFeedbackCounterBufferEXT | "; + if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & BufferUsageFlagBits::eRayTracingKHR ) + result += "RayTracingKHR | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using BufferViewCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags ) { 
return "{}"; } using BuildAccelerationStructureFlagsKHR = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsKHR::eLowMemory) + allFlags = VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::eLowMemory ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|( + BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator&( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator&( + BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^( + BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR + operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT { return ~( BuildAccelerationStructureFlagsKHR( bits ) ); } using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR; - VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value ) + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) result += "AllowUpdate | "; - if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) result += "AllowCompaction | "; - if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) result += "PreferFastTrace | "; - if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) result += "PreferFastBuild | "; - if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) result += "LowMemory | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) + result 
+= "AllowUpdate | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) + result += "AllowCompaction | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) + result += "PreferFastTrace | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) + result += "PreferFastBuild | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) + result += "LowMemory | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ColorComponentFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA) + allFlags = VkFlags( ColorComponentFlagBits::eR ) | VkFlags( ColorComponentFlagBits::eG ) | + VkFlags( ColorComponentFlagBits::eB ) | VkFlags( ColorComponentFlagBits::eA ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags + operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ColorComponentFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator&( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags + operator&( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ColorComponentFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags + operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ColorComponentFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( ColorComponentFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value ) + VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ColorComponentFlagBits::eR ) result += "R | "; - if ( value & ColorComponentFlagBits::eG ) result += "G | "; - if ( value & ColorComponentFlagBits::eB ) result += "B | "; - if ( value & ColorComponentFlagBits::eA ) result += "A | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ColorComponentFlagBits::eR ) + result += "R | "; + if ( value & ColorComponentFlagBits::eG ) + result += "G | "; + if ( value & ColorComponentFlagBits::eB ) + result += "B | "; + if ( value & ColorComponentFlagBits::eA ) + result += "A | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using CommandBufferResetFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources) + allFlags = VkFlags( CommandBufferResetFlagBits::eReleaseResources ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, 
CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags + operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandBufferResetFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator&( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags + operator&( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandBufferResetFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator^( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags + operator^( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandBufferResetFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( CommandBufferResetFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value ) + VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & CommandBufferResetFlagBits::eReleaseResources ) result += "ReleaseResources | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & CommandBufferResetFlagBits::eReleaseResources ) + result += "ReleaseResources | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using CommandBufferUsageFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse) + allFlags = VkFlags( CommandBufferUsageFlagBits::eOneTimeSubmit ) | + VkFlags( CommandBufferUsageFlagBits::eRenderPassContinue ) | + VkFlags( CommandBufferUsageFlagBits::eSimultaneousUse ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags + operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandBufferUsageFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator&( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags + operator&( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandBufferUsageFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator^( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags + operator^( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandBufferUsageFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( CommandBufferUsageFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value ) + VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) result += "OneTimeSubmit | "; - if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) result += "RenderPassContinue | "; - if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) result += "SimultaneousUse | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) + result += "OneTimeSubmit | "; + if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) + result += "RenderPassContinue | "; + if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) + result += "SimultaneousUse | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using CommandPoolCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) | VkFlags(CommandPoolCreateFlagBits::eProtected) + allFlags = VkFlags( CommandPoolCreateFlagBits::eTransient ) | + VkFlags( CommandPoolCreateFlagBits::eResetCommandBuffer ) | + VkFlags( CommandPoolCreateFlagBits::eProtected ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags + operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandPoolCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator&( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags + operator&( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandPoolCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags + operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandPoolCreateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( CommandPoolCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & CommandPoolCreateFlagBits::eTransient ) result += "Transient | "; - if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) result += "ResetCommandBuffer | "; - if ( value & 
CommandPoolCreateFlagBits::eProtected ) result += "Protected | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & CommandPoolCreateFlagBits::eTransient ) + result += "Transient | "; + if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) + result += "ResetCommandBuffer | "; + if ( value & CommandPoolCreateFlagBits::eProtected ) + result += "Protected | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using CommandPoolResetFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources) + allFlags = VkFlags( CommandPoolResetFlagBits::eReleaseResources ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags + operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandPoolResetFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator&( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags + operator&( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandPoolResetFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags + operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandPoolResetFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( CommandPoolResetFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value ) + VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & CommandPoolResetFlagBits::eReleaseResources ) result += "ReleaseResources | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & CommandPoolResetFlagBits::eReleaseResources ) + result += "ReleaseResources | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } enum class CommandPoolTrimFlagBits : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits ) { @@ -9359,113 +11022,136 @@ namespace VULKAN_HPP_NAMESPACE using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags; - VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags ) + VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags ) { return "{}"; } using CompositeAlphaFlagsKHR = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit) + allFlags = VkFlags( CompositeAlphaFlagBitsKHR::eOpaque ) | VkFlags( CompositeAlphaFlagBitsKHR::ePreMultiplied ) | + VkFlags( CompositeAlphaFlagBitsKHR::ePostMultiplied ) | 
VkFlags( CompositeAlphaFlagBitsKHR::eInherit ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR + operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return CompositeAlphaFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator&( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR + operator&( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return CompositeAlphaFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator^( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR + operator^( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return CompositeAlphaFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT { return ~( CompositeAlphaFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value ) + VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) result += "Opaque | "; - if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) result += "PreMultiplied | "; - if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) result += "PostMultiplied | "; - if ( value & CompositeAlphaFlagBitsKHR::eInherit ) result += "Inherit | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) + result += "PreMultiplied | "; + if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) + result += "PostMultiplied | "; + if ( value & CompositeAlphaFlagBitsKHR::eInherit ) + result += "Inherit | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ConditionalRenderingFlagsEXT = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ConditionalRenderingFlagBitsEXT::eInverted) + allFlags = VkFlags( ConditionalRenderingFlagBitsEXT::eInverted ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT + operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return ConditionalRenderingFlagsEXT( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator&( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT + operator&( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return 
ConditionalRenderingFlagsEXT( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator^( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT + operator^( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return ConditionalRenderingFlagsEXT( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT { return ~( ConditionalRenderingFlagsEXT( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) result += "Inverted | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) + result += "Inverted | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using CullModeFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack) + allFlags = VkFlags( CullModeFlagBits::eNone ) | VkFlags( CullModeFlagBits::eFront ) | + VkFlags( CullModeFlagBits::eBack ) | VkFlags( CullModeFlagBits::eFrontAndBack ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0, + CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CullModeFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&( CullModeFlagBits bit0, + CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CullModeFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0, + CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CullModeFlags( bit0 ) ^ bit1; } @@ -9475,144 +11161,185 @@ namespace VULKAN_HPP_NAMESPACE return ~( CullModeFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( CullModeFlags value ) + VULKAN_HPP_INLINE std::string to_string( CullModeFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & CullModeFlagBits::eFront ) result += "Front | "; - if ( value & CullModeFlagBits::eBack ) result += "Back | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & CullModeFlagBits::eFront ) + result += "Front | "; + if ( value & CullModeFlagBits::eBack ) + result += "Back | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using DebugReportFlagsEXT = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = 
VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug) + allFlags = VkFlags( DebugReportFlagBitsEXT::eInformation ) | VkFlags( DebugReportFlagBitsEXT::eWarning ) | + VkFlags( DebugReportFlagBitsEXT::ePerformanceWarning ) | VkFlags( DebugReportFlagBitsEXT::eError ) | + VkFlags( DebugReportFlagBitsEXT::eDebug ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT + operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return DebugReportFlagsEXT( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator&( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT + operator&( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return DebugReportFlagsEXT( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT + operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return DebugReportFlagsEXT( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT { return ~( DebugReportFlagsEXT( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & DebugReportFlagBitsEXT::eInformation ) result += "Information | "; - if ( value & DebugReportFlagBitsEXT::eWarning ) result += "Warning | "; - if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) result += "PerformanceWarning | "; - if ( value & DebugReportFlagBitsEXT::eError ) result += "Error | "; - if ( value & DebugReportFlagBitsEXT::eDebug ) result += "Debug | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DebugReportFlagBitsEXT::eInformation ) + result += "Information | "; + if ( value & DebugReportFlagBitsEXT::eWarning ) + result += "Warning | "; + if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) + result += "PerformanceWarning | "; + if ( value & DebugReportFlagBitsEXT::eError ) + result += "Error | "; + if ( value & DebugReportFlagBitsEXT::eDebug ) + result += "Debug | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using DebugUtilsMessageSeverityFlagsEXT = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError) + allFlags = VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) | + VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) | + VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) | + VkFlags( 
DebugUtilsMessageSeverityFlagBitsEXT::eError ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|( + DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator&( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator&( + DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return DebugUtilsMessageSeverityFlagsEXT( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^( + DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return DebugUtilsMessageSeverityFlagsEXT( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT + operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT { return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) result += "Verbose | "; - if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) result += "Info | "; - if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) result += "Warning | "; - if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) result += "Error | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) + result += "Verbose | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) + result += "Info | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) + result += "Warning | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) + result += "Error | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using DebugUtilsMessageTypeFlagsEXT = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance) + allFlags = VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) | + VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eValidation ) | + VkFlags( DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator&( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator&( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return DebugUtilsMessageTypeFlagsEXT( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return DebugUtilsMessageTypeFlagsEXT( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT { return ~( DebugUtilsMessageTypeFlagsEXT( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) result += "General | "; - if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) result += "Validation | "; - if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) result += "Performance | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) + result += "General | "; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) + result += "Validation | "; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) + result += "Performance | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT ) { @@ -9621,13 +11348,14 @@ namespace VULKAN_HPP_NAMESPACE using DebugUtilsMessengerCallbackDataFlagsEXT = Flags; - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT ) + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT ) { return "{}"; } enum class DebugUtilsMessengerCreateFlagBitsEXT : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT ) { @@ -9636,32 +11364,37 @@ namespace VULKAN_HPP_NAMESPACE using DebugUtilsMessengerCreateFlagsEXT = Flags; - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT ) + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT ) { return "{}"; } using DependencyFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eDeviceGroup) | VkFlags(DependencyFlagBits::eViewLocal) + allFlags = VkFlags( DependencyFlagBits::eByRegion ) | VkFlags( 
DependencyFlagBits::eDeviceGroup ) | + VkFlags( DependencyFlagBits::eViewLocal ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0, + DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DependencyFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&( DependencyFlagBits bit0, + DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DependencyFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0, + DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DependencyFlags( bit0 ) ^ bit1; } @@ -9671,103 +11404,130 @@ namespace VULKAN_HPP_NAMESPACE return ~( DependencyFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( DependencyFlags value ) + VULKAN_HPP_INLINE std::string to_string( DependencyFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & DependencyFlagBits::eByRegion ) result += "ByRegion | "; - if ( value & DependencyFlagBits::eDeviceGroup ) result += "DeviceGroup | "; - if ( value & DependencyFlagBits::eViewLocal ) result += "ViewLocal | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DependencyFlagBits::eByRegion ) + result += "ByRegion | "; + if ( value & DependencyFlagBits::eDeviceGroup ) + result += "DeviceGroup | "; + if ( value & DependencyFlagBits::eViewLocal ) + result += "ViewLocal | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using DescriptorBindingFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(DescriptorBindingFlagBits::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBits::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBits::ePartiallyBound) | VkFlags(DescriptorBindingFlagBits::eVariableDescriptorCount) + allFlags = VkFlags( DescriptorBindingFlagBits::eUpdateAfterBind ) | + VkFlags( DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) | + VkFlags( DescriptorBindingFlagBits::ePartiallyBound ) | + VkFlags( DescriptorBindingFlagBits::eVariableDescriptorCount ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator|( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags + operator|( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DescriptorBindingFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator&( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags + operator&( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DescriptorBindingFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator^( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags + 
operator^( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DescriptorBindingFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator~( DescriptorBindingFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator~( DescriptorBindingFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( DescriptorBindingFlags( bits ) ); } using DescriptorBindingFlagsEXT = DescriptorBindingFlags; - VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value ) + VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & DescriptorBindingFlagBits::eUpdateAfterBind ) result += "UpdateAfterBind | "; - if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) result += "UpdateUnusedWhilePending | "; - if ( value & DescriptorBindingFlagBits::ePartiallyBound ) result += "PartiallyBound | "; - if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount ) result += "VariableDescriptorCount | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DescriptorBindingFlagBits::eUpdateAfterBind ) + result += "UpdateAfterBind | "; + if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) + result += "UpdateUnusedWhilePending | "; + if ( value & DescriptorBindingFlagBits::ePartiallyBound ) + result += "PartiallyBound | "; + if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount ) + result += "VariableDescriptorCount | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using DescriptorPoolCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBind) + allFlags = VkFlags( DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) | + VkFlags( DescriptorPoolCreateFlagBits::eUpdateAfterBind ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags + operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DescriptorPoolCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator&( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags + operator&( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DescriptorPoolCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator^( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags + operator^( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DescriptorPoolCreateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( 
DescriptorPoolCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) result += "FreeDescriptorSet | "; - if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind ) result += "UpdateAfterBind | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) + result += "FreeDescriptorSet | "; + if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind ) + result += "UpdateAfterBind | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } enum class DescriptorPoolResetFlagBits : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits ) { @@ -9776,53 +11536,63 @@ namespace VULKAN_HPP_NAMESPACE using DescriptorPoolResetFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags ) + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags ) { return "{}"; } using DescriptorSetLayoutCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool) | VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) + allFlags = VkFlags( DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) | + VkFlags( DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DescriptorSetLayoutCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator&( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator&( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DescriptorSetLayoutCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator^( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator^( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DescriptorSetLayoutCreateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT { return ~( DescriptorSetLayoutCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & 
DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) result += "UpdateAfterBindPool | "; - if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) result += "PushDescriptorKHR | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) + result += "UpdateAfterBindPool | "; + if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) + result += "PushDescriptorKHR | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } enum class DescriptorUpdateTemplateCreateFlagBits : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits ) { @@ -9833,142 +11603,174 @@ namespace VULKAN_HPP_NAMESPACE using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags; - VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags ) { return "{}"; } using DeviceCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags ) { return "{}"; } using DeviceDiagnosticsConfigFlagsNV = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo) | VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking) | VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints) + allFlags = VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) | + VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) | + VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator&( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator&( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT { return ~( DeviceDiagnosticsConfigFlagsNV( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value ) + VULKAN_HPP_INLINE std::string to_string( 
DeviceDiagnosticsConfigFlagsNV value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) result += "EnableShaderDebugInfo | "; - if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) result += "EnableResourceTracking | "; - if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) result += "EnableAutomaticCheckpoints | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) + result += "EnableShaderDebugInfo | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) + result += "EnableResourceTracking | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) + result += "EnableAutomaticCheckpoints | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using DeviceGroupPresentModeFlagsKHR = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice) + allFlags = VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocal ) | + VkFlags( DeviceGroupPresentModeFlagBitsKHR::eRemote ) | + VkFlags( DeviceGroupPresentModeFlagBitsKHR::eSum ) | + VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator&( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator&( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return DeviceGroupPresentModeFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator^( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator^( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return DeviceGroupPresentModeFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT { return ~( DeviceGroupPresentModeFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value ) + VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) result += "Local | "; - if ( value & 
DeviceGroupPresentModeFlagBitsKHR::eRemote ) result += "Remote | "; - if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) result += "Sum | "; - if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) result += "LocalMultiDevice | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) + result += "Local | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) + result += "Remote | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) + result += "Sum | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) + result += "LocalMultiDevice | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using DeviceQueueCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected) + allFlags = VkFlags( DeviceQueueCreateFlagBits::eProtected ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags + operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DeviceQueueCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator&( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags + operator&( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DeviceQueueCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags + operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return DeviceQueueCreateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( DeviceQueueCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & DeviceQueueCreateFlagBits::eProtected ) result += "Protected | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DeviceQueueCreateFlagBits::eProtected ) + result += "Protected | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } enum class DisplayModeCreateFlagBitsKHR : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR ) { @@ -9977,55 +11779,68 @@ namespace VULKAN_HPP_NAMESPACE using DisplayModeCreateFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR ) { return "{}"; } using DisplayPlaneAlphaFlagsKHR = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | 
VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) + allFlags = VkFlags( DisplayPlaneAlphaFlagBitsKHR::eOpaque ) | VkFlags( DisplayPlaneAlphaFlagBitsKHR::eGlobal ) | + VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) | + VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR + operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator&( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR + operator&( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return DisplayPlaneAlphaFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator^( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR + operator^( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return DisplayPlaneAlphaFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT { return ~( DisplayPlaneAlphaFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value ) + VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) result += "Opaque | "; - if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) result += "Global | "; - if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) result += "PerPixel | "; - if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) result += "PerPixelPremultiplied | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) + result += "Global | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) + result += "PerPixel | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) + result += "PerPixelPremultiplied | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } enum class DisplaySurfaceCreateFlagBitsKHR : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR ) { @@ -10034,13 +11849,14 @@ namespace VULKAN_HPP_NAMESPACE using DisplaySurfaceCreateFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR ) { return "{}"; } enum class EventCreateFlagBits : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits ) { @@ -10049,382 +11865,494 @@ namespace VULKAN_HPP_NAMESPACE using EventCreateFlags = Flags; - 
VULKAN_HPP_INLINE std::string to_string( EventCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( EventCreateFlags ) { return "{}"; } using ExternalFenceFeatureFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) | VkFlags(ExternalFenceFeatureFlagBits::eImportable) + allFlags = + VkFlags( ExternalFenceFeatureFlagBits::eExportable ) | VkFlags( ExternalFenceFeatureFlagBits::eImportable ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags + operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalFenceFeatureFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator&( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags + operator&( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalFenceFeatureFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator^( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags + operator^( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalFenceFeatureFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( ExternalFenceFeatureFlags( bits ) ); } using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags; - VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value ) + VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ExternalFenceFeatureFlagBits::eExportable ) result += "Exportable | "; - if ( value & ExternalFenceFeatureFlagBits::eImportable ) result += "Importable | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ExternalFenceFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( value & ExternalFenceFeatureFlagBits::eImportable ) + result += "Importable | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ExternalFenceHandleTypeFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd) + allFlags = VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueFd ) | + VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) | + VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) | + VkFlags( ExternalFenceHandleTypeFlagBits::eSyncFd ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits 
bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags + operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalFenceHandleTypeFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator&( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags + operator&( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalFenceHandleTypeFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator^( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags + operator^( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalFenceHandleTypeFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( ExternalFenceHandleTypeFlags( bits ) ); } using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags; - VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value ) + VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | "; - if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | "; - if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | "; - if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) result += "SyncFd | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) + result += "SyncFd | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ExternalMemoryFeatureFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBits::eExportable) | VkFlags(ExternalMemoryFeatureFlagBits::eImportable) + allFlags = VkFlags( ExternalMemoryFeatureFlagBits::eDedicatedOnly ) | + VkFlags( ExternalMemoryFeatureFlagBits::eExportable ) | + VkFlags( ExternalMemoryFeatureFlagBits::eImportable ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags + operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalMemoryFeatureFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator&( 
ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags + operator&( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalMemoryFeatureFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator^( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags + operator^( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalMemoryFeatureFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( ExternalMemoryFeatureFlags( bits ) ); } using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags; - VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value ) + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) result += "DedicatedOnly | "; - if ( value & ExternalMemoryFeatureFlagBits::eExportable ) result += "Exportable | "; - if ( value & ExternalMemoryFeatureFlagBits::eImportable ) result += "Importable | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) + result += "DedicatedOnly | "; + if ( value & ExternalMemoryFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( value & ExternalMemoryFeatureFlagBits::eImportable ) + result += "Importable | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ExternalMemoryFeatureFlagsNV = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable) + allFlags = VkFlags( ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) | + VkFlags( ExternalMemoryFeatureFlagBitsNV::eExportable ) | + VkFlags( ExternalMemoryFeatureFlagBitsNV::eImportable ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV + operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator&( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV + operator&( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalMemoryFeatureFlagsNV( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator^( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV + operator^( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalMemoryFeatureFlagsNV( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) + VULKAN_HPP_NOEXCEPT { return ~( ExternalMemoryFeatureFlagsNV( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value ) + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) result += "DedicatedOnly | "; - if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) result += "Exportable | "; - if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) result += "Importable | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) + result += "DedicatedOnly | "; + if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) + result += "Exportable | "; + if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) + result += "Importable | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ExternalMemoryHandleTypeFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) | VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT) + allFlags = VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags + operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalMemoryHandleTypeFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator&( 
ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags + operator&( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalMemoryHandleTypeFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator^( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags + operator^( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalMemoryHandleTypeFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags + operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT { return ~( ExternalMemoryHandleTypeFlags( bits ) ); } using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags; - VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value ) + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) result += "D3D11Texture | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) result += "D3D11TextureKmt | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) result += "D3D12Heap | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) result += "D3D12Resource | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) result += "DmaBufEXT | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) result += "AndroidHardwareBufferANDROID | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) result += "HostAllocationEXT | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) result += "HostMappedForeignMemoryEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) + result += "D3D11Texture | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) + result += "D3D11TextureKmt | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) + result += "D3D12Heap | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) + result += "D3D12Resource | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) + result += "DmaBufEXT | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) + result += "AndroidHardwareBufferANDROID | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) + result += 
"HostAllocationEXT | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) + result += "HostMappedForeignMemoryEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ExternalMemoryHandleTypeFlagsNV = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) + allFlags = VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) | + VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) | + VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) | + VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator&( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator&( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalMemoryHandleTypeFlagsNV( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalMemoryHandleTypeFlagsNV( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT { return ~( ExternalMemoryHandleTypeFlagsNV( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value ) + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) result += "OpaqueWin32 | "; - if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | "; - if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) result += "D3D11Image | "; - if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) result += "D3D11ImageKmt | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) + result += "D3D11Image | "; + if ( value & 
ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) + result += "D3D11ImageKmt | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ExternalSemaphoreFeatureFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable) + allFlags = VkFlags( ExternalSemaphoreFeatureFlagBits::eExportable ) | + VkFlags( ExternalSemaphoreFeatureFlagBits::eImportable ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalSemaphoreFeatureFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator&( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator&( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalSemaphoreFeatureFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator^( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator^( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalSemaphoreFeatureFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT { return ~( ExternalSemaphoreFeatureFlags( bits ) ); } using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags; - VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value ) + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) result += "Exportable | "; - if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) result += "Importable | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) + result += "Importable | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ExternalSemaphoreHandleTypeFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd) + allFlags = VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) | + VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) | + VkFlags( 
ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) | + VkFlags( ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) | + VkFlags( ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags + operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator&( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags + operator&( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalSemaphoreHandleTypeFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator^( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags + operator^( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ExternalSemaphoreHandleTypeFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags + operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT { return ~( ExternalSemaphoreHandleTypeFlags( bits ) ); } using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags; - VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value ) + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | "; - if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | "; - if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | "; - if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) result += "D3D12Fence | "; - if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) result += "SyncFd | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) + result += "D3D12Fence | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) + result += "SyncFd | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using FenceCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(FenceCreateFlagBits::eSignaled) + allFlags = VkFlags( FenceCreateFlagBits::eSignaled ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( 
FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( FenceCreateFlagBits bit0, + FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return FenceCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&( FenceCreateFlagBits bit0, + FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return FenceCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0, + FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return FenceCreateFlags( bit0 ) ^ bit1; } @@ -10434,36 +12362,42 @@ namespace VULKAN_HPP_NAMESPACE return ~( FenceCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & FenceCreateFlagBits::eSignaled ) result += "Signaled | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & FenceCreateFlagBits::eSignaled ) + result += "Signaled | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using FenceImportFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(FenceImportFlagBits::eTemporary) + allFlags = VkFlags( FenceImportFlagBits::eTemporary ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0, + FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return FenceImportFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&( FenceImportFlagBits bit0, + FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return FenceImportFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0, + FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return FenceImportFlags( bit0 ) ^ bit1; } @@ -10475,36 +12409,60 @@ namespace VULKAN_HPP_NAMESPACE using FenceImportFlagsKHR = FenceImportFlags; - VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value ) + VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & FenceImportFlagBits::eTemporary ) result += "Temporary | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & FenceImportFlagBits::eTemporary ) + result += "Temporary | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using FormatFeatureFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) 
| VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmax) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT) + allFlags = + VkFlags( FormatFeatureFlagBits::eSampledImage ) | VkFlags( FormatFeatureFlagBits::eStorageImage ) | + VkFlags( FormatFeatureFlagBits::eStorageImageAtomic ) | VkFlags( FormatFeatureFlagBits::eUniformTexelBuffer ) | + VkFlags( FormatFeatureFlagBits::eStorageTexelBuffer ) | + VkFlags( FormatFeatureFlagBits::eStorageTexelBufferAtomic ) | VkFlags( FormatFeatureFlagBits::eVertexBuffer ) | + VkFlags( FormatFeatureFlagBits::eColorAttachment ) | VkFlags( FormatFeatureFlagBits::eColorAttachmentBlend ) | + VkFlags( FormatFeatureFlagBits::eDepthStencilAttachment ) | VkFlags( FormatFeatureFlagBits::eBlitSrc ) | + VkFlags( FormatFeatureFlagBits::eBlitDst ) | VkFlags( FormatFeatureFlagBits::eSampledImageFilterLinear ) | + VkFlags( FormatFeatureFlagBits::eTransferSrc ) | VkFlags( FormatFeatureFlagBits::eTransferDst ) | + VkFlags( FormatFeatureFlagBits::eMidpointChromaSamples ) | + VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) | + VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) | + VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) | + VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) | + VkFlags( FormatFeatureFlagBits::eDisjoint ) | VkFlags( FormatFeatureFlagBits::eCositedChromaSamples ) | + VkFlags( FormatFeatureFlagBits::eSampledImageFilterMinmax ) | + VkFlags( FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) | + VkFlags( FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) | + VkFlags( FormatFeatureFlagBits::eFragmentDensityMapEXT ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, + FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return FormatFeatureFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator&( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator&( FormatFeatureFlagBits bit0, + FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return FormatFeatureFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator^( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator^( FormatFeatureFlagBits bit0, + FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return FormatFeatureFlags( bit0 ) ^ bit1; } @@ -10514,100 +12472,138 @@ namespace VULKAN_HPP_NAMESPACE return ~( FormatFeatureFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value ) + VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & FormatFeatureFlagBits::eSampledImage ) result += "SampledImage | "; - if ( value & FormatFeatureFlagBits::eStorageImage ) result += "StorageImage | "; - if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) result += "StorageImageAtomic | "; - if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | "; - if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | "; - if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) result += "StorageTexelBufferAtomic | "; - if ( value & FormatFeatureFlagBits::eVertexBuffer ) result += "VertexBuffer | "; - if ( value & FormatFeatureFlagBits::eColorAttachment ) result += "ColorAttachment | "; - if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) result += "ColorAttachmentBlend | "; - if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | "; - if ( value & FormatFeatureFlagBits::eBlitSrc ) result += "BlitSrc | "; - if ( value & FormatFeatureFlagBits::eBlitDst ) result += "BlitDst | "; - if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) result += "SampledImageFilterLinear | "; - if ( value & FormatFeatureFlagBits::eTransferSrc ) result += "TransferSrc | "; - if ( value & FormatFeatureFlagBits::eTransferDst ) result += "TransferDst | "; - if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) result += "MidpointChromaSamples | "; - if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) result += "SampledImageYcbcrConversionLinearFilter | "; - if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | "; - if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | "; - if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | "; - if ( value & FormatFeatureFlagBits::eDisjoint ) result += "Disjoint | "; - if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) result += "CositedChromaSamples | "; - if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) result += "SampledImageFilterMinmax | "; - if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) result += "SampledImageFilterCubicIMG | "; - if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) result += "AccelerationStructureVertexBufferKHR | "; - if ( value & 
FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & FormatFeatureFlagBits::eSampledImage ) + result += "SampledImage | "; + if ( value & FormatFeatureFlagBits::eStorageImage ) + result += "StorageImage | "; + if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) + result += "StorageImageAtomic | "; + if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) + result += "StorageTexelBufferAtomic | "; + if ( value & FormatFeatureFlagBits::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & FormatFeatureFlagBits::eColorAttachment ) + result += "ColorAttachment | "; + if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) + result += "ColorAttachmentBlend | "; + if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) + result += "DepthStencilAttachment | "; + if ( value & FormatFeatureFlagBits::eBlitSrc ) + result += "BlitSrc | "; + if ( value & FormatFeatureFlagBits::eBlitDst ) + result += "BlitDst | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) + result += "SampledImageFilterLinear | "; + if ( value & FormatFeatureFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & FormatFeatureFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) + result += "MidpointChromaSamples | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) + result += "SampledImageYcbcrConversionLinearFilter | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) + result += "SampledImageYcbcrConversionSeparateReconstructionFilter | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicit | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | "; + if ( value & FormatFeatureFlagBits::eDisjoint ) + result += "Disjoint | "; + if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) + result += "CositedChromaSamples | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) + result += "SampledImageFilterMinmax | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) + result += "SampledImageFilterCubicIMG | "; + if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) + result += "AccelerationStructureVertexBufferKHR | "; + if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) + result += "FragmentDensityMapEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using FramebufferCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(FramebufferCreateFlagBits::eImageless) + allFlags = VkFlags( FramebufferCreateFlagBits::eImageless ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags + operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) 
VULKAN_HPP_NOEXCEPT { return FramebufferCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator&( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags + operator&( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return FramebufferCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags + operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return FramebufferCreateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( FramebufferCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & FramebufferCreateFlagBits::eImageless ) result += "Imageless | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & FramebufferCreateFlagBits::eImageless ) + result += "Imageless | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using GeometryFlagsKHR = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(GeometryFlagBitsKHR::eOpaque) | VkFlags(GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation) + allFlags = VkFlags( GeometryFlagBitsKHR::eOpaque ) | VkFlags( GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0, + GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return GeometryFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&( GeometryFlagBitsKHR bit0, + GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return GeometryFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0, + GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return GeometryFlagsKHR( bit0 ) ^ bit1; } @@ -10619,62 +12615,79 @@ namespace VULKAN_HPP_NAMESPACE using GeometryFlagsNV = GeometryFlagsKHR; - VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value ) + VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & GeometryFlagBitsKHR::eOpaque ) result += "Opaque | "; - if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & 
GeometryFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) + result += "NoDuplicateAnyHitInvocation | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using GeometryInstanceFlagsKHR = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable) | VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsKHR::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsKHR::eForceNoOpaque) + allFlags = VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) | + VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) | + VkFlags( GeometryInstanceFlagBitsKHR::eForceOpaque ) | + VkFlags( GeometryInstanceFlagBitsKHR::eForceNoOpaque ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR + operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return GeometryInstanceFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator&( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR + operator&( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return GeometryInstanceFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR + operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return GeometryInstanceFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT { return ~( GeometryInstanceFlagsKHR( bits ) ); } using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR; - VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value ) + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) result += "TriangleFacingCullDisable | "; - if ( value & GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | "; - if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) result += "ForceOpaque | "; - if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) result += "ForceNoOpaque | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) + result += "TriangleFacingCullDisable | "; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) + result += "TriangleFrontCounterclockwise | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) + result += "ForceOpaque | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) + result += "ForceNoOpaque | 
"; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } enum class HeadlessSurfaceCreateFlagBitsEXT : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT ) { @@ -10683,14 +12696,15 @@ namespace VULKAN_HPP_NAMESPACE using HeadlessSurfaceCreateFlagsEXT = Flags; - VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT ) + VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT ) { return "{}"; } #ifdef VK_USE_PLATFORM_IOS_MVK enum class IOSSurfaceCreateFlagBitsMVK : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK ) { @@ -10699,7 +12713,7 @@ namespace VULKAN_HPP_NAMESPACE using IOSSurfaceCreateFlagsMVK = Flags; - VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK ) + VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK ) { return "{}"; } @@ -10707,25 +12721,34 @@ namespace VULKAN_HPP_NAMESPACE using ImageAspectFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata) | VkFlags(ImageAspectFlagBits::ePlane0) | VkFlags(ImageAspectFlagBits::ePlane1) | VkFlags(ImageAspectFlagBits::ePlane2) | VkFlags(ImageAspectFlagBits::eMemoryPlane0EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane1EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane2EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane3EXT) + allFlags = VkFlags( ImageAspectFlagBits::eColor ) | VkFlags( ImageAspectFlagBits::eDepth ) | + VkFlags( ImageAspectFlagBits::eStencil ) | VkFlags( ImageAspectFlagBits::eMetadata ) | + VkFlags( ImageAspectFlagBits::ePlane0 ) | VkFlags( ImageAspectFlagBits::ePlane1 ) | + VkFlags( ImageAspectFlagBits::ePlane2 ) | VkFlags( ImageAspectFlagBits::eMemoryPlane0EXT ) | + VkFlags( ImageAspectFlagBits::eMemoryPlane1EXT ) | VkFlags( ImageAspectFlagBits::eMemoryPlane2EXT ) | + VkFlags( ImageAspectFlagBits::eMemoryPlane3EXT ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0, + ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ImageAspectFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&( ImageAspectFlagBits bit0, + ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ImageAspectFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0, + ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ImageAspectFlags( bit0 ) ^ bit1; } @@ -10735,46 +12758,71 @@ namespace VULKAN_HPP_NAMESPACE return ~( ImageAspectFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value ) + VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ImageAspectFlagBits::eColor ) result += "Color | "; - if ( value & ImageAspectFlagBits::eDepth ) result += "Depth | "; - if ( value & 
ImageAspectFlagBits::eStencil ) result += "Stencil | "; - if ( value & ImageAspectFlagBits::eMetadata ) result += "Metadata | "; - if ( value & ImageAspectFlagBits::ePlane0 ) result += "Plane0 | "; - if ( value & ImageAspectFlagBits::ePlane1 ) result += "Plane1 | "; - if ( value & ImageAspectFlagBits::ePlane2 ) result += "Plane2 | "; - if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) result += "MemoryPlane0EXT | "; - if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) result += "MemoryPlane1EXT | "; - if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) result += "MemoryPlane2EXT | "; - if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) result += "MemoryPlane3EXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ImageAspectFlagBits::eColor ) + result += "Color | "; + if ( value & ImageAspectFlagBits::eDepth ) + result += "Depth | "; + if ( value & ImageAspectFlagBits::eStencil ) + result += "Stencil | "; + if ( value & ImageAspectFlagBits::eMetadata ) + result += "Metadata | "; + if ( value & ImageAspectFlagBits::ePlane0 ) + result += "Plane0 | "; + if ( value & ImageAspectFlagBits::ePlane1 ) + result += "Plane1 | "; + if ( value & ImageAspectFlagBits::ePlane2 ) + result += "Plane2 | "; + if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) + result += "MemoryPlane0EXT | "; + if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) + result += "MemoryPlane1EXT | "; + if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) + result += "MemoryPlane2EXT | "; + if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) + result += "MemoryPlane3EXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ImageCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::eAlias) | VkFlags(ImageCreateFlagBits::eSplitInstanceBindRegions) | VkFlags(ImageCreateFlagBits::e2DArrayCompatible) | VkFlags(ImageCreateFlagBits::eBlockTexelViewCompatible) | VkFlags(ImageCreateFlagBits::eExtendedUsage) | VkFlags(ImageCreateFlagBits::eProtected) | VkFlags(ImageCreateFlagBits::eDisjoint) | VkFlags(ImageCreateFlagBits::eCornerSampledNV) | VkFlags(ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) | VkFlags(ImageCreateFlagBits::eSubsampledEXT) + allFlags = VkFlags( ImageCreateFlagBits::eSparseBinding ) | VkFlags( ImageCreateFlagBits::eSparseResidency ) | + VkFlags( ImageCreateFlagBits::eSparseAliased ) | VkFlags( ImageCreateFlagBits::eMutableFormat ) | + VkFlags( ImageCreateFlagBits::eCubeCompatible ) | VkFlags( ImageCreateFlagBits::eAlias ) | + VkFlags( ImageCreateFlagBits::eSplitInstanceBindRegions ) | + VkFlags( ImageCreateFlagBits::e2DArrayCompatible ) | + VkFlags( ImageCreateFlagBits::eBlockTexelViewCompatible ) | + VkFlags( ImageCreateFlagBits::eExtendedUsage ) | VkFlags( ImageCreateFlagBits::eProtected ) | + VkFlags( ImageCreateFlagBits::eDisjoint ) | VkFlags( ImageCreateFlagBits::eCornerSampledNV ) | + VkFlags( ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) | + VkFlags( ImageCreateFlagBits::eSubsampledEXT ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator|( 
ImageCreateFlagBits bit0, + ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ImageCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&( ImageCreateFlagBits bit0, + ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ImageCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0, + ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ImageCreateFlags( bit0 ) ^ bit1; } @@ -10784,32 +12832,49 @@ namespace VULKAN_HPP_NAMESPACE return ~( ImageCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ImageCreateFlagBits::eSparseBinding ) result += "SparseBinding | "; - if ( value & ImageCreateFlagBits::eSparseResidency ) result += "SparseResidency | "; - if ( value & ImageCreateFlagBits::eSparseAliased ) result += "SparseAliased | "; - if ( value & ImageCreateFlagBits::eMutableFormat ) result += "MutableFormat | "; - if ( value & ImageCreateFlagBits::eCubeCompatible ) result += "CubeCompatible | "; - if ( value & ImageCreateFlagBits::eAlias ) result += "Alias | "; - if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | "; - if ( value & ImageCreateFlagBits::e2DArrayCompatible ) result += "2DArrayCompatible | "; - if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) result += "BlockTexelViewCompatible | "; - if ( value & ImageCreateFlagBits::eExtendedUsage ) result += "ExtendedUsage | "; - if ( value & ImageCreateFlagBits::eProtected ) result += "Protected | "; - if ( value & ImageCreateFlagBits::eDisjoint ) result += "Disjoint | "; - if ( value & ImageCreateFlagBits::eCornerSampledNV ) result += "CornerSampledNV | "; - if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) result += "SampleLocationsCompatibleDepthEXT | "; - if ( value & ImageCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ImageCreateFlagBits::eSparseBinding ) + result += "SparseBinding | "; + if ( value & ImageCreateFlagBits::eSparseResidency ) + result += "SparseResidency | "; + if ( value & ImageCreateFlagBits::eSparseAliased ) + result += "SparseAliased | "; + if ( value & ImageCreateFlagBits::eMutableFormat ) + result += "MutableFormat | "; + if ( value & ImageCreateFlagBits::eCubeCompatible ) + result += "CubeCompatible | "; + if ( value & ImageCreateFlagBits::eAlias ) + result += "Alias | "; + if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) + result += "SplitInstanceBindRegions | "; + if ( value & ImageCreateFlagBits::e2DArrayCompatible ) + result += "2DArrayCompatible | "; + if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) + result += "BlockTexelViewCompatible | "; + if ( value & ImageCreateFlagBits::eExtendedUsage ) + result += "ExtendedUsage | "; + if ( value & ImageCreateFlagBits::eProtected ) + result += "Protected | "; + if ( value & ImageCreateFlagBits::eDisjoint ) + result += "Disjoint | "; + if ( value & 
ImageCreateFlagBits::eCornerSampledNV ) + result += "CornerSampledNV | "; + if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) + result += "SampleLocationsCompatibleDepthEXT | "; + if ( value & ImageCreateFlagBits::eSubsampledEXT ) + result += "SubsampledEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } #ifdef VK_USE_PLATFORM_FUCHSIA enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA ) { @@ -10818,7 +12883,7 @@ namespace VULKAN_HPP_NAMESPACE using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags; - VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA ) + VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA ) { return "{}"; } @@ -10826,25 +12891,34 @@ namespace VULKAN_HPP_NAMESPACE using ImageUsageFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment) | VkFlags(ImageUsageFlagBits::eShadingRateImageNV) | VkFlags(ImageUsageFlagBits::eFragmentDensityMapEXT) + allFlags = + VkFlags( ImageUsageFlagBits::eTransferSrc ) | VkFlags( ImageUsageFlagBits::eTransferDst ) | + VkFlags( ImageUsageFlagBits::eSampled ) | VkFlags( ImageUsageFlagBits::eStorage ) | + VkFlags( ImageUsageFlagBits::eColorAttachment ) | VkFlags( ImageUsageFlagBits::eDepthStencilAttachment ) | + VkFlags( ImageUsageFlagBits::eTransientAttachment ) | VkFlags( ImageUsageFlagBits::eInputAttachment ) | + VkFlags( ImageUsageFlagBits::eShadingRateImageNV ) | VkFlags( ImageUsageFlagBits::eFragmentDensityMapEXT ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0, + ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ImageUsageFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&( ImageUsageFlagBits bit0, + ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ImageUsageFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( ImageUsageFlagBits bit0, + ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ImageUsageFlags( bit0 ) ^ bit1; } @@ -10854,153 +12928,190 @@ namespace VULKAN_HPP_NAMESPACE return ~( ImageUsageFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value ) + VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ImageUsageFlagBits::eTransferSrc ) result += "TransferSrc | "; - if ( value & ImageUsageFlagBits::eTransferDst ) result += "TransferDst | "; - if ( value & ImageUsageFlagBits::eSampled ) result += "Sampled | "; - if ( value & ImageUsageFlagBits::eStorage 
) result += "Storage | "; - if ( value & ImageUsageFlagBits::eColorAttachment ) result += "ColorAttachment | "; - if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | "; - if ( value & ImageUsageFlagBits::eTransientAttachment ) result += "TransientAttachment | "; - if ( value & ImageUsageFlagBits::eInputAttachment ) result += "InputAttachment | "; - if ( value & ImageUsageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | "; - if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ImageUsageFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & ImageUsageFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & ImageUsageFlagBits::eSampled ) + result += "Sampled | "; + if ( value & ImageUsageFlagBits::eStorage ) + result += "Storage | "; + if ( value & ImageUsageFlagBits::eColorAttachment ) + result += "ColorAttachment | "; + if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) + result += "DepthStencilAttachment | "; + if ( value & ImageUsageFlagBits::eTransientAttachment ) + result += "TransientAttachment | "; + if ( value & ImageUsageFlagBits::eInputAttachment ) + result += "InputAttachment | "; + if ( value & ImageUsageFlagBits::eShadingRateImageNV ) + result += "ShadingRateImageNV | "; + if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) + result += "FragmentDensityMapEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ImageViewCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT) + allFlags = VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags + operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ImageViewCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator&( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags + operator&( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ImageViewCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags + operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ImageViewCreateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( ImageViewCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) result += 
"FragmentDensityMapDynamicEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) + result += "FragmentDensityMapDynamicEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using IndirectCommandsLayoutUsageFlagsNV = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences) + allFlags = VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) | + VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) | + VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|( + IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator&( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator&( + IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^( + IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV + operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT { return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value ) + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) result += "ExplicitPreprocess | "; - if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) result += "IndexedSequences | "; - if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) result += "UnorderedSequences | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) + result += "ExplicitPreprocess | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) + result += "IndexedSequences | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) + result += 
"UnorderedSequences | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using IndirectStateFlagsNV = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(IndirectStateFlagBitsNV::eFlagFrontface) + allFlags = VkFlags( IndirectStateFlagBitsNV::eFlagFrontface ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV + operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return IndirectStateFlagsNV( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator&( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV + operator&( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return IndirectStateFlagsNV( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV + operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { return IndirectStateFlagsNV( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits ) + VULKAN_HPP_NOEXCEPT { return ~( IndirectStateFlagsNV( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value ) + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) result += "FlagFrontface | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) + result += "FlagFrontface | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using InstanceCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags ) { return "{}"; } #ifdef VK_USE_PLATFORM_MACOS_MVK enum class MacOSSurfaceCreateFlagBitsMVK : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK ) { @@ -11009,7 +13120,7 @@ namespace VULKAN_HPP_NAMESPACE using MacOSSurfaceCreateFlagsMVK = Flags; - VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK ) + VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK ) { return "{}"; } @@ -11017,68 +13128,82 @@ namespace VULKAN_HPP_NAMESPACE using MemoryAllocateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask) | VkFlags(MemoryAllocateFlagBits::eDeviceAddress) | VkFlags(MemoryAllocateFlagBits::eDeviceAddressCaptureReplay) + allFlags = VkFlags( MemoryAllocateFlagBits::eDeviceMask ) | VkFlags( MemoryAllocateFlagBits::eDeviceAddress ) | + VkFlags( MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags + operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return MemoryAllocateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator&( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags + operator&( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return MemoryAllocateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags + operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return MemoryAllocateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( MemoryAllocateFlags( bits ) ); } using MemoryAllocateFlagsKHR = MemoryAllocateFlags; - VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value ) + VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & MemoryAllocateFlagBits::eDeviceMask ) result += "DeviceMask | "; - if ( value & MemoryAllocateFlagBits::eDeviceAddress ) result += "DeviceAddress | "; - if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & MemoryAllocateFlagBits::eDeviceMask ) + result += "DeviceMask | "; + if ( value & MemoryAllocateFlagBits::eDeviceAddress ) + result += "DeviceAddress | "; + if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using MemoryHeapFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstance) + allFlags = VkFlags( MemoryHeapFlagBits::eDeviceLocal ) | VkFlags( MemoryHeapFlagBits::eMultiInstance ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, + MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return MemoryHeapFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&( MemoryHeapFlagBits bit0, + MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return MemoryHeapFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0, + MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return MemoryHeapFlags( bit0 ) ^ bit1; } @@ -11088,18 +13213,22 @@ namespace VULKAN_HPP_NAMESPACE return ~( MemoryHeapFlags( bits ) ); } - 
VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value ) + VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & MemoryHeapFlagBits::eDeviceLocal ) result += "DeviceLocal | "; - if ( value & MemoryHeapFlagBits::eMultiInstance ) result += "MultiInstance | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & MemoryHeapFlagBits::eDeviceLocal ) + result += "DeviceLocal | "; + if ( value & MemoryHeapFlagBits::eMultiInstance ) + result += "MultiInstance | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } enum class MemoryMapFlagBits : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits ) { @@ -11108,60 +13237,79 @@ namespace VULKAN_HPP_NAMESPACE using MemoryMapFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags ) + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags ) { return "{}"; } using MemoryPropertyFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) | VkFlags(MemoryPropertyFlagBits::eProtected) | VkFlags(MemoryPropertyFlagBits::eDeviceCoherentAMD) | VkFlags(MemoryPropertyFlagBits::eDeviceUncachedAMD) + allFlags = VkFlags( MemoryPropertyFlagBits::eDeviceLocal ) | VkFlags( MemoryPropertyFlagBits::eHostVisible ) | + VkFlags( MemoryPropertyFlagBits::eHostCoherent ) | VkFlags( MemoryPropertyFlagBits::eHostCached ) | + VkFlags( MemoryPropertyFlagBits::eLazilyAllocated ) | VkFlags( MemoryPropertyFlagBits::eProtected ) | + VkFlags( MemoryPropertyFlagBits::eDeviceCoherentAMD ) | + VkFlags( MemoryPropertyFlagBits::eDeviceUncachedAMD ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags + operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return MemoryPropertyFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator&( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags + operator&( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return MemoryPropertyFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator^( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags + operator^( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return MemoryPropertyFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( MemoryPropertyFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value ) + VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & 
MemoryPropertyFlagBits::eDeviceLocal ) result += "DeviceLocal | "; - if ( value & MemoryPropertyFlagBits::eHostVisible ) result += "HostVisible | "; - if ( value & MemoryPropertyFlagBits::eHostCoherent ) result += "HostCoherent | "; - if ( value & MemoryPropertyFlagBits::eHostCached ) result += "HostCached | "; - if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) result += "LazilyAllocated | "; - if ( value & MemoryPropertyFlagBits::eProtected ) result += "Protected | "; - if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) result += "DeviceCoherentAMD | "; - if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) result += "DeviceUncachedAMD | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & MemoryPropertyFlagBits::eDeviceLocal ) + result += "DeviceLocal | "; + if ( value & MemoryPropertyFlagBits::eHostVisible ) + result += "HostVisible | "; + if ( value & MemoryPropertyFlagBits::eHostCoherent ) + result += "HostCoherent | "; + if ( value & MemoryPropertyFlagBits::eHostCached ) + result += "HostCached | "; + if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) + result += "LazilyAllocated | "; + if ( value & MemoryPropertyFlagBits::eProtected ) + result += "Protected | "; + if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) + result += "DeviceCoherentAMD | "; + if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) + result += "DeviceUncachedAMD | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } #ifdef VK_USE_PLATFORM_METAL_EXT enum class MetalSurfaceCreateFlagBitsEXT : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT ) { @@ -11170,7 +13318,7 @@ namespace VULKAN_HPP_NAMESPACE using MetalSurfaceCreateFlagsEXT = Flags; - VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT ) + VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT ) { return "{}"; } @@ -11178,143 +13326,171 @@ namespace VULKAN_HPP_NAMESPACE using PeerMemoryFeatureFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBits::eGenericDst) + allFlags = VkFlags( PeerMemoryFeatureFlagBits::eCopySrc ) | VkFlags( PeerMemoryFeatureFlagBits::eCopyDst ) | + VkFlags( PeerMemoryFeatureFlagBits::eGenericSrc ) | VkFlags( PeerMemoryFeatureFlagBits::eGenericDst ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags + operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PeerMemoryFeatureFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator&( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags + operator&( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PeerMemoryFeatureFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator^( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags + operator^( PeerMemoryFeatureFlagBits bit0, 
PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PeerMemoryFeatureFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( PeerMemoryFeatureFlags( bits ) ); } using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; - VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value ) + VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) result += "CopySrc | "; - if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) result += "CopyDst | "; - if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) result += "GenericSrc | "; - if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) result += "GenericDst | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) + result += "CopySrc | "; + if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) + result += "CopyDst | "; + if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) + result += "GenericSrc | "; + if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) + result += "GenericDst | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using PerformanceCounterDescriptionFlagsKHR = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting) | VkFlags(PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted) + allFlags = VkFlags( PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) | + VkFlags( PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|( + PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return PerformanceCounterDescriptionFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&( + PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return PerformanceCounterDescriptionFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^( + PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return PerformanceCounterDescriptionFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR + operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT { return ~( PerformanceCounterDescriptionFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value ) + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) result += "PerformanceImpacting | "; - if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) result += "ConcurrentlyImpacted | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) + result += "PerformanceImpacting | "; + if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) + result += "ConcurrentlyImpacted | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using PipelineCacheCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(PipelineCacheCreateFlagBits::eExternallySynchronizedEXT) + allFlags = VkFlags( PipelineCacheCreateFlagBits::eExternallySynchronizedEXT ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags + operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineCacheCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator&( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags + operator&( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineCacheCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags + operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineCacheCreateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( PipelineCacheCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & PipelineCacheCreateFlagBits::eExternallySynchronizedEXT ) result += "ExternallySynchronizedEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & PipelineCacheCreateFlagBits::eExternallySynchronizedEXT ) + result += "ExternallySynchronizedEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using PipelineColorBlendStateCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags ) + 
VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags ) { return "{}"; } using PipelineCompilerControlFlagsAMD = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD ) + VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD ) { return "{}"; } enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV ) { @@ -11323,13 +13499,14 @@ namespace VULKAN_HPP_NAMESPACE using PipelineCoverageModulationStateCreateFlagsNV = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV ) + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV ) { return "{}"; } enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV ) { @@ -11338,13 +13515,14 @@ namespace VULKAN_HPP_NAMESPACE using PipelineCoverageReductionStateCreateFlagsNV = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV ) + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV ) { return "{}"; } enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV ) { @@ -11353,117 +13531,167 @@ namespace VULKAN_HPP_NAMESPACE using PipelineCoverageToColorStateCreateFlagsNV = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV ) + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV ) { return "{}"; } using PipelineCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingSkipAabbsKHR) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) | VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) | VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR) | VkFlags(PipelineCreateFlagBits::eIndirectBindableNV) | VkFlags(PipelineCreateFlagBits::eLibraryKHR) | VkFlags(PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT) | VkFlags(PipelineCreateFlagBits::eEarlyReturnOnFailureEXT) + allFlags = + VkFlags( PipelineCreateFlagBits::eDisableOptimization ) | VkFlags( PipelineCreateFlagBits::eAllowDerivatives ) | + VkFlags( PipelineCreateFlagBits::eDerivative ) | VkFlags( PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) | + VkFlags( PipelineCreateFlagBits::eDispatchBase ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) | + VkFlags( 
PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) | + VkFlags( PipelineCreateFlagBits::eDeferCompileNV ) | VkFlags( PipelineCreateFlagBits::eCaptureStatisticsKHR ) | + VkFlags( PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) | + VkFlags( PipelineCreateFlagBits::eIndirectBindableNV ) | VkFlags( PipelineCreateFlagBits::eLibraryKHR ) | + VkFlags( PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) | + VkFlags( PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags + operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator&( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags + operator&( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags + operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineCreateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( PipelineCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & PipelineCreateFlagBits::eDisableOptimization ) result += "DisableOptimization | "; - if ( value & PipelineCreateFlagBits::eAllowDerivatives ) result += "AllowDerivatives | "; - if ( value & PipelineCreateFlagBits::eDerivative ) result += "Derivative | "; - if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | "; - if ( value & PipelineCreateFlagBits::eDispatchBase ) result += "DispatchBase | "; - if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) result += "RayTracingNoNullAnyHitShadersKHR | "; - if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) result += "RayTracingNoNullClosestHitShadersKHR | "; - if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) result += "RayTracingNoNullMissShadersKHR | "; - if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) result += "RayTracingNoNullIntersectionShadersKHR | "; - if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) result += "RayTracingSkipTrianglesKHR | "; - if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) result += "RayTracingSkipAabbsKHR | "; - if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | "; - if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += 
"CaptureStatisticsKHR | "; - if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) result += "CaptureInternalRepresentationsKHR | "; - if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) result += "IndirectBindableNV | "; - if ( value & PipelineCreateFlagBits::eLibraryKHR ) result += "LibraryKHR | "; - if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) result += "FailOnPipelineCompileRequiredEXT | "; - if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) result += "EarlyReturnOnFailureEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & PipelineCreateFlagBits::eDisableOptimization ) + result += "DisableOptimization | "; + if ( value & PipelineCreateFlagBits::eAllowDerivatives ) + result += "AllowDerivatives | "; + if ( value & PipelineCreateFlagBits::eDerivative ) + result += "Derivative | "; + if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) + result += "ViewIndexFromDeviceIndex | "; + if ( value & PipelineCreateFlagBits::eDispatchBase ) + result += "DispatchBase | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) + result += "RayTracingNoNullAnyHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) + result += "RayTracingNoNullClosestHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) + result += "RayTracingNoNullMissShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) + result += "RayTracingNoNullIntersectionShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) + result += "RayTracingSkipTrianglesKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) + result += "RayTracingSkipAabbsKHR | "; + if ( value & PipelineCreateFlagBits::eDeferCompileNV ) + result += "DeferCompileNV | "; + if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) + result += "CaptureStatisticsKHR | "; + if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) + result += "CaptureInternalRepresentationsKHR | "; + if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) + result += "IndirectBindableNV | "; + if ( value & PipelineCreateFlagBits::eLibraryKHR ) + result += "LibraryKHR | "; + if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) + result += "FailOnPipelineCompileRequiredEXT | "; + if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) + result += "EarlyReturnOnFailureEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using PipelineCreationFeedbackFlagsEXT = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(PipelineCreationFeedbackFlagBitsEXT::eValid) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration) + allFlags = VkFlags( PipelineCreationFeedbackFlagBitsEXT::eValid ) | + VkFlags( PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) | + VkFlags( PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator|( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT + operator|( PipelineCreationFeedbackFlagBitsEXT bit0, 
PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineCreationFeedbackFlagsEXT( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator&( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT + operator&( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineCreationFeedbackFlagsEXT( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator^( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT + operator^( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineCreationFeedbackFlagsEXT( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator~( PipelineCreationFeedbackFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT + operator~( PipelineCreationFeedbackFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT { return ~( PipelineCreationFeedbackFlagsEXT( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & PipelineCreationFeedbackFlagBitsEXT::eValid ) result += "Valid | "; - if ( value & PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) result += "ApplicationPipelineCacheHit | "; - if ( value & PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) result += "BasePipelineAcceleration | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & PipelineCreationFeedbackFlagBitsEXT::eValid ) + result += "Valid | "; + if ( value & PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) + result += "ApplicationPipelineCacheHit | "; + if ( value & PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) + result += "BasePipelineAcceleration | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using PipelineDepthStencilStateCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags ) { return "{}"; } enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT ) { @@ -11472,56 +13700,59 @@ namespace VULKAN_HPP_NAMESPACE using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT ) + VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT ) { return "{}"; } using PipelineDynamicStateCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags ) { return "{}"; } using PipelineInputAssemblyStateCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( 
PipelineInputAssemblyStateCreateFlags ) { return "{}"; } using PipelineLayoutCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags ) { return "{}"; } using PipelineMultisampleStateCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags ) { return "{}"; } enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT ) { return "(void)"; } - using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags; + using PipelineRasterizationConservativeStateCreateFlagsEXT = + Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT ) + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT ) { return "{}"; } enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT ) { @@ -11530,20 +13761,21 @@ namespace VULKAN_HPP_NAMESPACE using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT ) + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT ) { return "{}"; } using PipelineRasterizationStateCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags ) { return "{}"; } enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT ) { @@ -11552,72 +13784,101 @@ namespace VULKAN_HPP_NAMESPACE using PipelineRasterizationStateStreamCreateFlagsEXT = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT ) + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT ) { return "{}"; } using PipelineShaderStageCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT) | VkFlags(PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT) + allFlags = VkFlags( PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) | + VkFlags( PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineShaderStageCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator&( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator&( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) 
VULKAN_HPP_NOEXCEPT { return PipelineShaderStageCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator^( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator^( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineShaderStageCreateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT { return ~( PipelineShaderStageCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) result += "AllowVaryingSubgroupSizeEXT | "; - if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) result += "RequireFullSubgroupsEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) + result += "AllowVaryingSubgroupSizeEXT | "; + if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) + result += "RequireFullSubgroupsEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using PipelineStageFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eRayTracingShaderKHR) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildKHR) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) | VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) | VkFlags(PipelineStageFlagBits::eCommandPreprocessNV) + allFlags = + VkFlags( PipelineStageFlagBits::eTopOfPipe ) | VkFlags( PipelineStageFlagBits::eDrawIndirect ) | + VkFlags( PipelineStageFlagBits::eVertexInput ) | VkFlags( PipelineStageFlagBits::eVertexShader ) | + VkFlags( PipelineStageFlagBits::eTessellationControlShader ) | + VkFlags( PipelineStageFlagBits::eTessellationEvaluationShader ) | + VkFlags( 
PipelineStageFlagBits::eGeometryShader ) | VkFlags( PipelineStageFlagBits::eFragmentShader ) | + VkFlags( PipelineStageFlagBits::eEarlyFragmentTests ) | VkFlags( PipelineStageFlagBits::eLateFragmentTests ) | + VkFlags( PipelineStageFlagBits::eColorAttachmentOutput ) | VkFlags( PipelineStageFlagBits::eComputeShader ) | + VkFlags( PipelineStageFlagBits::eTransfer ) | VkFlags( PipelineStageFlagBits::eBottomOfPipe ) | + VkFlags( PipelineStageFlagBits::eHost ) | VkFlags( PipelineStageFlagBits::eAllGraphics ) | + VkFlags( PipelineStageFlagBits::eAllCommands ) | VkFlags( PipelineStageFlagBits::eTransformFeedbackEXT ) | + VkFlags( PipelineStageFlagBits::eConditionalRenderingEXT ) | + VkFlags( PipelineStageFlagBits::eRayTracingShaderKHR ) | + VkFlags( PipelineStageFlagBits::eAccelerationStructureBuildKHR ) | + VkFlags( PipelineStageFlagBits::eShadingRateImageNV ) | VkFlags( PipelineStageFlagBits::eTaskShaderNV ) | + VkFlags( PipelineStageFlagBits::eMeshShaderNV ) | VkFlags( PipelineStageFlagBits::eFragmentDensityProcessEXT ) | + VkFlags( PipelineStageFlagBits::eCommandPreprocessNV ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0, + PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineStageFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&( PipelineStageFlagBits bit0, + PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineStageFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0, + PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return PipelineStageFlags( bit0 ) ^ bit1; } @@ -11627,63 +13888,91 @@ namespace VULKAN_HPP_NAMESPACE return ~( PipelineStageFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value ) + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & PipelineStageFlagBits::eTopOfPipe ) result += "TopOfPipe | "; - if ( value & PipelineStageFlagBits::eDrawIndirect ) result += "DrawIndirect | "; - if ( value & PipelineStageFlagBits::eVertexInput ) result += "VertexInput | "; - if ( value & PipelineStageFlagBits::eVertexShader ) result += "VertexShader | "; - if ( value & PipelineStageFlagBits::eTessellationControlShader ) result += "TessellationControlShader | "; - if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) result += "TessellationEvaluationShader | "; - if ( value & PipelineStageFlagBits::eGeometryShader ) result += "GeometryShader | "; - if ( value & PipelineStageFlagBits::eFragmentShader ) result += "FragmentShader | "; - if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) result += "EarlyFragmentTests | "; - if ( value & PipelineStageFlagBits::eLateFragmentTests ) result += "LateFragmentTests | "; - if ( value & PipelineStageFlagBits::eColorAttachmentOutput ) result += "ColorAttachmentOutput | "; - if ( value & PipelineStageFlagBits::eComputeShader ) result += "ComputeShader | "; - if ( 
value & PipelineStageFlagBits::eTransfer ) result += "Transfer | "; - if ( value & PipelineStageFlagBits::eBottomOfPipe ) result += "BottomOfPipe | "; - if ( value & PipelineStageFlagBits::eHost ) result += "Host | "; - if ( value & PipelineStageFlagBits::eAllGraphics ) result += "AllGraphics | "; - if ( value & PipelineStageFlagBits::eAllCommands ) result += "AllCommands | "; - if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | "; - if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; - if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) result += "RayTracingShaderKHR | "; - if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) result += "AccelerationStructureBuildKHR | "; - if ( value & PipelineStageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | "; - if ( value & PipelineStageFlagBits::eTaskShaderNV ) result += "TaskShaderNV | "; - if ( value & PipelineStageFlagBits::eMeshShaderNV ) result += "MeshShaderNV | "; - if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) result += "FragmentDensityProcessEXT | "; - if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) result += "CommandPreprocessNV | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & PipelineStageFlagBits::eTopOfPipe ) + result += "TopOfPipe | "; + if ( value & PipelineStageFlagBits::eDrawIndirect ) + result += "DrawIndirect | "; + if ( value & PipelineStageFlagBits::eVertexInput ) + result += "VertexInput | "; + if ( value & PipelineStageFlagBits::eVertexShader ) + result += "VertexShader | "; + if ( value & PipelineStageFlagBits::eTessellationControlShader ) + result += "TessellationControlShader | "; + if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) + result += "TessellationEvaluationShader | "; + if ( value & PipelineStageFlagBits::eGeometryShader ) + result += "GeometryShader | "; + if ( value & PipelineStageFlagBits::eFragmentShader ) + result += "FragmentShader | "; + if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) + result += "EarlyFragmentTests | "; + if ( value & PipelineStageFlagBits::eLateFragmentTests ) + result += "LateFragmentTests | "; + if ( value & PipelineStageFlagBits::eColorAttachmentOutput ) + result += "ColorAttachmentOutput | "; + if ( value & PipelineStageFlagBits::eComputeShader ) + result += "ComputeShader | "; + if ( value & PipelineStageFlagBits::eTransfer ) + result += "Transfer | "; + if ( value & PipelineStageFlagBits::eBottomOfPipe ) + result += "BottomOfPipe | "; + if ( value & PipelineStageFlagBits::eHost ) + result += "Host | "; + if ( value & PipelineStageFlagBits::eAllGraphics ) + result += "AllGraphics | "; + if ( value & PipelineStageFlagBits::eAllCommands ) + result += "AllCommands | "; + if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) + result += "TransformFeedbackEXT | "; + if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) + result += "RayTracingShaderKHR | "; + if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) + result += "AccelerationStructureBuildKHR | "; + if ( value & PipelineStageFlagBits::eShadingRateImageNV ) + result += "ShadingRateImageNV | "; + if ( value & PipelineStageFlagBits::eTaskShaderNV ) + result += "TaskShaderNV | "; + if ( value & PipelineStageFlagBits::eMeshShaderNV ) + result += "MeshShaderNV | "; + if ( value & 
PipelineStageFlagBits::eFragmentDensityProcessEXT ) + result += "FragmentDensityProcessEXT | "; + if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) + result += "CommandPreprocessNV | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using PipelineTessellationStateCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags ) { return "{}"; } using PipelineVertexInputStateCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags ) { return "{}"; } using PipelineViewportStateCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags ) { return "{}"; } enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV ) { @@ -11692,32 +13981,36 @@ namespace VULKAN_HPP_NAMESPACE using PipelineViewportSwizzleStateCreateFlagsNV = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV ) + VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV ) { return "{}"; } using QueryControlFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(QueryControlFlagBits::ePrecise) + allFlags = VkFlags( QueryControlFlagBits::ePrecise ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0, + QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return QueryControlFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&( QueryControlFlagBits bit0, + QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return QueryControlFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0, + QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return QueryControlFlags( bit0 ) ^ bit1; } @@ -11727,92 +14020,126 @@ namespace VULKAN_HPP_NAMESPACE return ~( QueryControlFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value ) + VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & QueryControlFlagBits::ePrecise ) result += "Precise | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & QueryControlFlagBits::ePrecise ) + result += "Precise | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using QueryPipelineStatisticFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | 
VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations) + allFlags = VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) | + VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) | + VkFlags( QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) | + VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) | + VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) | + VkFlags( QueryPipelineStatisticFlagBits::eClippingInvocations ) | + VkFlags( QueryPipelineStatisticFlagBits::eClippingPrimitives ) | + VkFlags( QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) | + VkFlags( QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) | + VkFlags( QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) | + VkFlags( QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags + operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return QueryPipelineStatisticFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator&( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags + operator&( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return QueryPipelineStatisticFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator^( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags + operator^( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return QueryPipelineStatisticFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( QueryPipelineStatisticFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value ) + VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) result += "InputAssemblyVertices | "; - if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) result += "InputAssemblyPrimitives | "; - if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) 
result += "VertexShaderInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) result += "GeometryShaderInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) result += "GeometryShaderPrimitives | "; - if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) result += "ClippingInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) result += "ClippingPrimitives | "; - if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) result += "FragmentShaderInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) result += "TessellationControlShaderPatches | "; - if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) result += "TessellationEvaluationShaderInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) result += "ComputeShaderInvocations | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) + result += "InputAssemblyVertices | "; + if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) + result += "InputAssemblyPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) + result += "VertexShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) + result += "GeometryShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) + result += "GeometryShaderPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) + result += "ClippingInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) + result += "ClippingPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) + result += "FragmentShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) + result += "TessellationControlShaderPatches | "; + if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) + result += "TessellationEvaluationShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) + result += "ComputeShaderInvocations | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using QueryPoolCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags ) { return "{}"; } using QueryResultFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial) + allFlags = VkFlags( QueryResultFlagBits::e64 ) | VkFlags( QueryResultFlagBits::eWait ) | + VkFlags( QueryResultFlagBits::eWithAvailability ) | VkFlags( QueryResultFlagBits::ePartial ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0, + QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return QueryResultFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&( QueryResultFlagBits bit0, QueryResultFlagBits 
bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&( QueryResultFlagBits bit0, + QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return QueryResultFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0, + QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return QueryResultFlags( bit0 ) ^ bit1; } @@ -11822,39 +14149,50 @@ namespace VULKAN_HPP_NAMESPACE return ~( QueryResultFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value ) + VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & QueryResultFlagBits::e64 ) result += "64 | "; - if ( value & QueryResultFlagBits::eWait ) result += "Wait | "; - if ( value & QueryResultFlagBits::eWithAvailability ) result += "WithAvailability | "; - if ( value & QueryResultFlagBits::ePartial ) result += "Partial | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & QueryResultFlagBits::e64 ) + result += "64 | "; + if ( value & QueryResultFlagBits::eWait ) + result += "Wait | "; + if ( value & QueryResultFlagBits::eWithAvailability ) + result += "WithAvailability | "; + if ( value & QueryResultFlagBits::ePartial ) + result += "Partial | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using QueueFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding) | VkFlags(QueueFlagBits::eProtected) + allFlags = VkFlags( QueueFlagBits::eGraphics ) | VkFlags( QueueFlagBits::eCompute ) | + VkFlags( QueueFlagBits::eTransfer ) | VkFlags( QueueFlagBits::eSparseBinding ) | + VkFlags( QueueFlagBits::eProtected ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0, + QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return QueueFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&( QueueFlagBits bit0, + QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return QueueFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0, + QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return QueueFlags( bit0 ) ^ bit1; } @@ -11864,79 +14202,98 @@ namespace VULKAN_HPP_NAMESPACE return ~( QueueFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( QueueFlags value ) + VULKAN_HPP_INLINE std::string to_string( QueueFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & QueueFlagBits::eGraphics ) result += "Graphics | "; - if ( value & QueueFlagBits::eCompute ) result += "Compute | "; - if ( value & QueueFlagBits::eTransfer ) result += "Transfer | "; - if ( value & QueueFlagBits::eSparseBinding ) result += "SparseBinding | "; - if ( value & 
QueueFlagBits::eProtected ) result += "Protected | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & QueueFlagBits::eGraphics ) + result += "Graphics | "; + if ( value & QueueFlagBits::eCompute ) + result += "Compute | "; + if ( value & QueueFlagBits::eTransfer ) + result += "Transfer | "; + if ( value & QueueFlagBits::eSparseBinding ) + result += "SparseBinding | "; + if ( value & QueueFlagBits::eProtected ) + result += "Protected | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using RenderPassCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(RenderPassCreateFlagBits::eTransformQCOM) + allFlags = VkFlags( RenderPassCreateFlagBits::eTransformQCOM ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags + operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return RenderPassCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator&( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags + operator&( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return RenderPassCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags + operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return RenderPassCreateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( RenderPassCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & RenderPassCreateFlagBits::eTransformQCOM ) result += "TransformQCOM | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & RenderPassCreateFlagBits::eTransformQCOM ) + result += "TransformQCOM | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ResolveModeFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ResolveModeFlagBits::eNone) | VkFlags(ResolveModeFlagBits::eSampleZero) | VkFlags(ResolveModeFlagBits::eAverage) | VkFlags(ResolveModeFlagBits::eMin) | VkFlags(ResolveModeFlagBits::eMax) + allFlags = VkFlags( ResolveModeFlagBits::eNone ) | VkFlags( ResolveModeFlagBits::eSampleZero ) | + VkFlags( ResolveModeFlagBits::eAverage ) | VkFlags( ResolveModeFlagBits::eMin ) | + VkFlags( ResolveModeFlagBits::eMax ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator|( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator|( ResolveModeFlagBits bit0, + ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT 
{ return ResolveModeFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator&( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator&( ResolveModeFlagBits bit0, + ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ResolveModeFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator^( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator^( ResolveModeFlagBits bit0, + ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ResolveModeFlags( bit0 ) ^ bit1; } @@ -11948,39 +14305,51 @@ namespace VULKAN_HPP_NAMESPACE using ResolveModeFlagsKHR = ResolveModeFlags; - VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value ) + VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ResolveModeFlagBits::eSampleZero ) result += "SampleZero | "; - if ( value & ResolveModeFlagBits::eAverage ) result += "Average | "; - if ( value & ResolveModeFlagBits::eMin ) result += "Min | "; - if ( value & ResolveModeFlagBits::eMax ) result += "Max | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ResolveModeFlagBits::eSampleZero ) + result += "SampleZero | "; + if ( value & ResolveModeFlagBits::eAverage ) + result += "Average | "; + if ( value & ResolveModeFlagBits::eMin ) + result += "Min | "; + if ( value & ResolveModeFlagBits::eMax ) + result += "Max | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using SampleCountFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64) + allFlags = VkFlags( SampleCountFlagBits::e1 ) | VkFlags( SampleCountFlagBits::e2 ) | + VkFlags( SampleCountFlagBits::e4 ) | VkFlags( SampleCountFlagBits::e8 ) | + VkFlags( SampleCountFlagBits::e16 ) | VkFlags( SampleCountFlagBits::e32 ) | + VkFlags( SampleCountFlagBits::e64 ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0, + SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SampleCountFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator&( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator&( SampleCountFlagBits bit0, + SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SampleCountFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0, + SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SampleCountFlags( bit0 ) ^ bit1; } @@ -11990,42 +14359,55 @@ namespace VULKAN_HPP_NAMESPACE return ~( SampleCountFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value ) + VULKAN_HPP_INLINE std::string 
to_string( SampleCountFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & SampleCountFlagBits::e1 ) result += "1 | "; - if ( value & SampleCountFlagBits::e2 ) result += "2 | "; - if ( value & SampleCountFlagBits::e4 ) result += "4 | "; - if ( value & SampleCountFlagBits::e8 ) result += "8 | "; - if ( value & SampleCountFlagBits::e16 ) result += "16 | "; - if ( value & SampleCountFlagBits::e32 ) result += "32 | "; - if ( value & SampleCountFlagBits::e64 ) result += "64 | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & SampleCountFlagBits::e1 ) + result += "1 | "; + if ( value & SampleCountFlagBits::e2 ) + result += "2 | "; + if ( value & SampleCountFlagBits::e4 ) + result += "4 | "; + if ( value & SampleCountFlagBits::e8 ) + result += "8 | "; + if ( value & SampleCountFlagBits::e16 ) + result += "16 | "; + if ( value & SampleCountFlagBits::e32 ) + result += "32 | "; + if ( value & SampleCountFlagBits::e64 ) + result += "64 | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using SamplerCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(SamplerCreateFlagBits::eSubsampledEXT) | VkFlags(SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT) + allFlags = VkFlags( SamplerCreateFlagBits::eSubsampledEXT ) | + VkFlags( SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, + SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SamplerCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&( SamplerCreateFlagBits bit0, + SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SamplerCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( SamplerCreateFlagBits bit0, + SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SamplerCreateFlags( bit0 ) ^ bit1; } @@ -12035,85 +14417,99 @@ namespace VULKAN_HPP_NAMESPACE return ~( SamplerCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & SamplerCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | "; - if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) result += "SubsampledCoarseReconstructionEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & SamplerCreateFlagBits::eSubsampledEXT ) + result += "SubsampledEXT | "; + if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) + result += "SubsampledCoarseReconstructionEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using SemaphoreCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags ) { 
      return "{}";
    }

    using SemaphoreImportFlags = Flags;

-  template <> struct FlagTraits<SemaphoreImportFlagBits>
+  template <>
+  struct FlagTraits<SemaphoreImportFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary)
+      allFlags = VkFlags( SemaphoreImportFlagBits::eTemporary )
     };
   };

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags
+    operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return SemaphoreImportFlags( bit0 ) | bit1;
   }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator&( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags
+    operator&( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return SemaphoreImportFlags( bit0 ) & bit1;
   }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags
+    operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return SemaphoreImportFlags( bit0 ) ^ bit1;
   }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
   {
     return ~( SemaphoreImportFlags( bits ) );
   }

    using SemaphoreImportFlagsKHR = SemaphoreImportFlags;

-  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value )
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value )
   {
-    if ( !value ) return "{}";
+    if ( !value )
+      return "{}";
     std::string result;
-    if ( value & SemaphoreImportFlagBits::eTemporary ) result += "Temporary | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & SemaphoreImportFlagBits::eTemporary )
+      result += "Temporary | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }

    using SemaphoreWaitFlags = Flags;

-  template <> struct FlagTraits<SemaphoreWaitFlagBits>
+  template <>
+  struct FlagTraits<SemaphoreWaitFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(SemaphoreWaitFlagBits::eAny)
+      allFlags = VkFlags( SemaphoreWaitFlagBits::eAny )
     };
   };

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator|( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator|( SemaphoreWaitFlagBits bit0,
+                                                                       SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return SemaphoreWaitFlags( bit0 ) | bit1;
   }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator&( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator&( SemaphoreWaitFlagBits bit0,
+                                                                       SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return SemaphoreWaitFlags( bit0 ) & bit1;
   }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator^( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator^( SemaphoreWaitFlagBits bit0,
+                                                                       SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return SemaphoreWaitFlags( bit0 ) ^ bit1;
   }

@@ -12125,50 +14521,63 @@ namespace VULKAN_HPP_NAMESPACE
    using SemaphoreWaitFlagsKHR =
SemaphoreWaitFlags; - VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value ) + VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & SemaphoreWaitFlagBits::eAny ) result += "Any | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & SemaphoreWaitFlagBits::eAny ) + result += "Any | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using ShaderCorePropertiesFlagsAMD = Flags; - VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD ) + VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD ) { return "{}"; } using ShaderModuleCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags ) + VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags ) { return "{}"; } using ShaderStageFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenKHR) | VkFlags(ShaderStageFlagBits::eAnyHitKHR) | VkFlags(ShaderStageFlagBits::eClosestHitKHR) | VkFlags(ShaderStageFlagBits::eMissKHR) | VkFlags(ShaderStageFlagBits::eIntersectionKHR) | VkFlags(ShaderStageFlagBits::eCallableKHR) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV) + allFlags = VkFlags( ShaderStageFlagBits::eVertex ) | VkFlags( ShaderStageFlagBits::eTessellationControl ) | + VkFlags( ShaderStageFlagBits::eTessellationEvaluation ) | VkFlags( ShaderStageFlagBits::eGeometry ) | + VkFlags( ShaderStageFlagBits::eFragment ) | VkFlags( ShaderStageFlagBits::eCompute ) | + VkFlags( ShaderStageFlagBits::eAllGraphics ) | VkFlags( ShaderStageFlagBits::eAll ) | + VkFlags( ShaderStageFlagBits::eRaygenKHR ) | VkFlags( ShaderStageFlagBits::eAnyHitKHR ) | + VkFlags( ShaderStageFlagBits::eClosestHitKHR ) | VkFlags( ShaderStageFlagBits::eMissKHR ) | + VkFlags( ShaderStageFlagBits::eIntersectionKHR ) | VkFlags( ShaderStageFlagBits::eCallableKHR ) | + VkFlags( ShaderStageFlagBits::eTaskNV ) | VkFlags( ShaderStageFlagBits::eMeshNV ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0, + ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ShaderStageFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&( ShaderStageFlagBits bit0, + ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ShaderStageFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0, + ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ShaderStageFlags( bit0 ) ^ bit1; } @@ -12178,129 +14587,167 @@ namespace VULKAN_HPP_NAMESPACE 
return ~( ShaderStageFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value ) + VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ShaderStageFlagBits::eVertex ) result += "Vertex | "; - if ( value & ShaderStageFlagBits::eTessellationControl ) result += "TessellationControl | "; - if ( value & ShaderStageFlagBits::eTessellationEvaluation ) result += "TessellationEvaluation | "; - if ( value & ShaderStageFlagBits::eGeometry ) result += "Geometry | "; - if ( value & ShaderStageFlagBits::eFragment ) result += "Fragment | "; - if ( value & ShaderStageFlagBits::eCompute ) result += "Compute | "; - if ( value & ShaderStageFlagBits::eRaygenKHR ) result += "RaygenKHR | "; - if ( value & ShaderStageFlagBits::eAnyHitKHR ) result += "AnyHitKHR | "; - if ( value & ShaderStageFlagBits::eClosestHitKHR ) result += "ClosestHitKHR | "; - if ( value & ShaderStageFlagBits::eMissKHR ) result += "MissKHR | "; - if ( value & ShaderStageFlagBits::eIntersectionKHR ) result += "IntersectionKHR | "; - if ( value & ShaderStageFlagBits::eCallableKHR ) result += "CallableKHR | "; - if ( value & ShaderStageFlagBits::eTaskNV ) result += "TaskNV | "; - if ( value & ShaderStageFlagBits::eMeshNV ) result += "MeshNV | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ShaderStageFlagBits::eVertex ) + result += "Vertex | "; + if ( value & ShaderStageFlagBits::eTessellationControl ) + result += "TessellationControl | "; + if ( value & ShaderStageFlagBits::eTessellationEvaluation ) + result += "TessellationEvaluation | "; + if ( value & ShaderStageFlagBits::eGeometry ) + result += "Geometry | "; + if ( value & ShaderStageFlagBits::eFragment ) + result += "Fragment | "; + if ( value & ShaderStageFlagBits::eCompute ) + result += "Compute | "; + if ( value & ShaderStageFlagBits::eRaygenKHR ) + result += "RaygenKHR | "; + if ( value & ShaderStageFlagBits::eAnyHitKHR ) + result += "AnyHitKHR | "; + if ( value & ShaderStageFlagBits::eClosestHitKHR ) + result += "ClosestHitKHR | "; + if ( value & ShaderStageFlagBits::eMissKHR ) + result += "MissKHR | "; + if ( value & ShaderStageFlagBits::eIntersectionKHR ) + result += "IntersectionKHR | "; + if ( value & ShaderStageFlagBits::eCallableKHR ) + result += "CallableKHR | "; + if ( value & ShaderStageFlagBits::eTaskNV ) + result += "TaskNV | "; + if ( value & ShaderStageFlagBits::eMeshNV ) + result += "MeshNV | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using SparseImageFormatFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize) + allFlags = VkFlags( SparseImageFormatFlagBits::eSingleMiptail ) | + VkFlags( SparseImageFormatFlagBits::eAlignedMipSize ) | + VkFlags( SparseImageFormatFlagBits::eNonstandardBlockSize ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags + operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SparseImageFormatFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator&( 
SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags + operator&( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SparseImageFormatFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags + operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SparseImageFormatFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( SparseImageFormatFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value ) + VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & SparseImageFormatFlagBits::eSingleMiptail ) result += "SingleMiptail | "; - if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) result += "AlignedMipSize | "; - if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) result += "NonstandardBlockSize | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & SparseImageFormatFlagBits::eSingleMiptail ) + result += "SingleMiptail | "; + if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) + result += "AlignedMipSize | "; + if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) + result += "NonstandardBlockSize | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using SparseMemoryBindFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata) + allFlags = VkFlags( SparseMemoryBindFlagBits::eMetadata ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags + operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SparseMemoryBindFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator&( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags + operator&( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SparseMemoryBindFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags + operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SparseMemoryBindFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( SparseMemoryBindFlags( bits ) ); } - 
VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value ) + VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & SparseMemoryBindFlagBits::eMetadata ) result += "Metadata | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & SparseMemoryBindFlagBits::eMetadata ) + result += "Metadata | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using StencilFaceFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eFrontAndBack) + allFlags = VkFlags( StencilFaceFlagBits::eFront ) | VkFlags( StencilFaceFlagBits::eBack ) | + VkFlags( StencilFaceFlagBits::eFrontAndBack ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0, + StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return StencilFaceFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&( StencilFaceFlagBits bit0, + StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return StencilFaceFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0, + StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return StencilFaceFlags( bit0 ) ^ bit1; } @@ -12310,19 +14757,23 @@ namespace VULKAN_HPP_NAMESPACE return ~( StencilFaceFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value ) + VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & StencilFaceFlagBits::eFront ) result += "Front | "; - if ( value & StencilFaceFlagBits::eBack ) result += "Back | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & StencilFaceFlagBits::eFront ) + result += "Front | "; + if ( value & StencilFaceFlagBits::eBack ) + result += "Back | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } #ifdef VK_USE_PLATFORM_GGP enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP ) { @@ -12331,7 +14782,7 @@ namespace VULKAN_HPP_NAMESPACE using StreamDescriptorSurfaceCreateFlagsGGP = Flags; - VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP ) + VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP ) { return "{}"; } @@ -12339,265 +14790,350 @@ namespace VULKAN_HPP_NAMESPACE using SubgroupFeatureFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) | VkFlags(SubgroupFeatureFlagBits::eVote) | VkFlags(SubgroupFeatureFlagBits::eArithmetic) | VkFlags(SubgroupFeatureFlagBits::eBallot) | VkFlags(SubgroupFeatureFlagBits::eShuffle) | VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) | 
VkFlags(SubgroupFeatureFlagBits::eClustered) | VkFlags(SubgroupFeatureFlagBits::eQuad) | VkFlags(SubgroupFeatureFlagBits::ePartitionedNV) + allFlags = VkFlags( SubgroupFeatureFlagBits::eBasic ) | VkFlags( SubgroupFeatureFlagBits::eVote ) | + VkFlags( SubgroupFeatureFlagBits::eArithmetic ) | VkFlags( SubgroupFeatureFlagBits::eBallot ) | + VkFlags( SubgroupFeatureFlagBits::eShuffle ) | VkFlags( SubgroupFeatureFlagBits::eShuffleRelative ) | + VkFlags( SubgroupFeatureFlagBits::eClustered ) | VkFlags( SubgroupFeatureFlagBits::eQuad ) | + VkFlags( SubgroupFeatureFlagBits::ePartitionedNV ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags + operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SubgroupFeatureFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator&( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags + operator&( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SubgroupFeatureFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags + operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SubgroupFeatureFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( SubgroupFeatureFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value ) + VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & SubgroupFeatureFlagBits::eBasic ) result += "Basic | "; - if ( value & SubgroupFeatureFlagBits::eVote ) result += "Vote | "; - if ( value & SubgroupFeatureFlagBits::eArithmetic ) result += "Arithmetic | "; - if ( value & SubgroupFeatureFlagBits::eBallot ) result += "Ballot | "; - if ( value & SubgroupFeatureFlagBits::eShuffle ) result += "Shuffle | "; - if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) result += "ShuffleRelative | "; - if ( value & SubgroupFeatureFlagBits::eClustered ) result += "Clustered | "; - if ( value & SubgroupFeatureFlagBits::eQuad ) result += "Quad | "; - if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) result += "PartitionedNV | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & SubgroupFeatureFlagBits::eBasic ) + result += "Basic | "; + if ( value & SubgroupFeatureFlagBits::eVote ) + result += "Vote | "; + if ( value & SubgroupFeatureFlagBits::eArithmetic ) + result += "Arithmetic | "; + if ( value & SubgroupFeatureFlagBits::eBallot ) + result += "Ballot | "; + if ( value & SubgroupFeatureFlagBits::eShuffle ) + result += "Shuffle | "; + if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) + result += "ShuffleRelative | "; + if ( value & SubgroupFeatureFlagBits::eClustered ) + result += "Clustered | "; + if ( value & SubgroupFeatureFlagBits::eQuad ) + 
result += "Quad | "; + if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) + result += "PartitionedNV | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using SubpassDescriptionFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) + allFlags = VkFlags( SubpassDescriptionFlagBits::ePerViewAttributesNVX ) | + VkFlags( SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags + operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SubpassDescriptionFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator&( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags + operator&( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SubpassDescriptionFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator^( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags + operator^( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SubpassDescriptionFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( SubpassDescriptionFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value ) + VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) result += "PerViewAttributesNVX | "; - if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) result += "PerViewPositionXOnlyNVX | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) + result += "PerViewAttributesNVX | "; + if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) + result += "PerViewPositionXOnlyNVX | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using SurfaceCounterFlagsEXT = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank) + allFlags = VkFlags( SurfaceCounterFlagBitsEXT::eVblank ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT + operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return SurfaceCounterFlagsEXT( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator&( SurfaceCounterFlagBitsEXT bit0, 
SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT + operator&( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return SurfaceCounterFlagsEXT( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT + operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return SurfaceCounterFlagsEXT( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT { return ~( SurfaceCounterFlagsEXT( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & SurfaceCounterFlagBitsEXT::eVblank ) result += "Vblank | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & SurfaceCounterFlagBitsEXT::eVblank ) + result += "Vblank | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using SurfaceTransformFlagsKHR = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit) + allFlags = VkFlags( SurfaceTransformFlagBitsKHR::eIdentity ) | VkFlags( SurfaceTransformFlagBitsKHR::eRotate90 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eRotate180 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eRotate270 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirror ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eInherit ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR + operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return SurfaceTransformFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator&( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR + operator&( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return SurfaceTransformFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator^( SurfaceTransformFlagBitsKHR bit0, 
SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR + operator^( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return SurfaceTransformFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT { return ~( SurfaceTransformFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value ) + VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) result += "Identity | "; - if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) result += "Rotate90 | "; - if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) result += "Rotate180 | "; - if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) result += "Rotate270 | "; - if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) result += "HorizontalMirror | "; - if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) result += "HorizontalMirrorRotate90 | "; - if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) result += "HorizontalMirrorRotate180 | "; - if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) result += "HorizontalMirrorRotate270 | "; - if ( value & SurfaceTransformFlagBitsKHR::eInherit ) result += "Inherit | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) + result += "Identity | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) + result += "Rotate90 | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) + result += "Rotate180 | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) + result += "Rotate270 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) + result += "HorizontalMirror | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) + result += "HorizontalMirrorRotate90 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) + result += "HorizontalMirrorRotate180 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) + result += "HorizontalMirrorRotate270 | "; + if ( value & SurfaceTransformFlagBitsKHR::eInherit ) + result += "Inherit | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } using SwapchainCreateFlagsKHR = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) | VkFlags(SwapchainCreateFlagBitsKHR::eProtected) | VkFlags(SwapchainCreateFlagBitsKHR::eMutableFormat) + allFlags = VkFlags( SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) | + VkFlags( SwapchainCreateFlagBitsKHR::eProtected ) | + VkFlags( SwapchainCreateFlagBitsKHR::eMutableFormat ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR + operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return SwapchainCreateFlagsKHR( bit0 ) | bit1; } - 
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator&( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR
+    operator&( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return SwapchainCreateFlagsKHR( bit0 ) & bit1;
   }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator^( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR
+    operator^( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return SwapchainCreateFlagsKHR( bit0 ) ^ bit1;
   }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits )
+    VULKAN_HPP_NOEXCEPT
   {
     return ~( SwapchainCreateFlagsKHR( bits ) );
   }

-  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value )
+  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value )
   {
-    if ( !value ) return "{}";
+    if ( !value )
+      return "{}";
     std::string result;
-    if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
-    if ( value & SwapchainCreateFlagBitsKHR::eProtected ) result += "Protected | ";
-    if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) result += "MutableFormat | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions )
+      result += "SplitInstanceBindRegions | ";
+    if ( value & SwapchainCreateFlagBitsKHR::eProtected )
+      result += "Protected | ";
+    if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat )
+      result += "MutableFormat | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }

    using ToolPurposeFlagsEXT = Flags;

-  template <> struct FlagTraits<ToolPurposeFlagBitsEXT>
+  template <>
+  struct FlagTraits<ToolPurposeFlagBitsEXT>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(ToolPurposeFlagBitsEXT::eValidation) | VkFlags(ToolPurposeFlagBitsEXT::eProfiling) | VkFlags(ToolPurposeFlagBitsEXT::eTracing) | VkFlags(ToolPurposeFlagBitsEXT::eAdditionalFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eModifyingFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eDebugReporting) | VkFlags(ToolPurposeFlagBitsEXT::eDebugMarkers)
+      allFlags = VkFlags( ToolPurposeFlagBitsEXT::eValidation ) | VkFlags( ToolPurposeFlagBitsEXT::eProfiling ) |
+                 VkFlags( ToolPurposeFlagBitsEXT::eTracing ) | VkFlags( ToolPurposeFlagBitsEXT::eAdditionalFeatures ) |
+                 VkFlags( ToolPurposeFlagBitsEXT::eModifyingFeatures ) |
+                 VkFlags( ToolPurposeFlagBitsEXT::eDebugReporting ) | VkFlags( ToolPurposeFlagBitsEXT::eDebugMarkers )
     };
   };

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator|( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT
+    operator|( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return ToolPurposeFlagsEXT( bit0 ) | bit1;
   }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator&( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT
+    operator&( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return ToolPurposeFlagsEXT( bit0 ) &
bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator^( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT + operator^( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT { return ToolPurposeFlagsEXT( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator~( ToolPurposeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator~( ToolPurposeFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT { return ~( ToolPurposeFlagsEXT( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagsEXT value ) { - if ( !value ) return "{}"; + if ( !value ) + return "{}"; std::string result; - if ( value & ToolPurposeFlagBitsEXT::eValidation ) result += "Validation | "; - if ( value & ToolPurposeFlagBitsEXT::eProfiling ) result += "Profiling | "; - if ( value & ToolPurposeFlagBitsEXT::eTracing ) result += "Tracing | "; - if ( value & ToolPurposeFlagBitsEXT::eAdditionalFeatures ) result += "AdditionalFeatures | "; - if ( value & ToolPurposeFlagBitsEXT::eModifyingFeatures ) result += "ModifyingFeatures | "; - if ( value & ToolPurposeFlagBitsEXT::eDebugReporting ) result += "DebugReporting | "; - if ( value & ToolPurposeFlagBitsEXT::eDebugMarkers ) result += "DebugMarkers | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ToolPurposeFlagBitsEXT::eValidation ) + result += "Validation | "; + if ( value & ToolPurposeFlagBitsEXT::eProfiling ) + result += "Profiling | "; + if ( value & ToolPurposeFlagBitsEXT::eTracing ) + result += "Tracing | "; + if ( value & ToolPurposeFlagBitsEXT::eAdditionalFeatures ) + result += "AdditionalFeatures | "; + if ( value & ToolPurposeFlagBitsEXT::eModifyingFeatures ) + result += "ModifyingFeatures | "; + if ( value & ToolPurposeFlagBitsEXT::eDebugReporting ) + result += "DebugReporting | "; + if ( value & ToolPurposeFlagBitsEXT::eDebugMarkers ) + result += "DebugMarkers | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } enum class ValidationCacheCreateFlagBitsEXT : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT ) { @@ -12606,14 +15142,15 @@ namespace VULKAN_HPP_NAMESPACE using ValidationCacheCreateFlagsEXT = Flags; - VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT ) + VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT ) { return "{}"; } #ifdef VK_USE_PLATFORM_VI_NN enum class ViSurfaceCreateFlagBitsNN : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN ) { @@ -12622,7 +15159,7 @@ namespace VULKAN_HPP_NAMESPACE using ViSurfaceCreateFlagsNN = Flags; - VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN ) + VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN ) { return "{}"; } @@ -12630,7 +15167,8 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WAYLAND_KHR enum class WaylandSurfaceCreateFlagBitsKHR : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR ) { @@ -12639,7 +15177,7 @@ namespace VULKAN_HPP_NAMESPACE using WaylandSurfaceCreateFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR ) { return "{}"; } @@ -12647,7 +15185,8 @@ 
namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR enum class Win32SurfaceCreateFlagBitsKHR : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR ) { @@ -12656,7 +15195,7 @@ namespace VULKAN_HPP_NAMESPACE using Win32SurfaceCreateFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR ) { return "{}"; } @@ -12664,7 +15203,8 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_XCB_KHR enum class XcbSurfaceCreateFlagBitsKHR : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR ) { @@ -12673,7 +15213,7 @@ namespace VULKAN_HPP_NAMESPACE using XcbSurfaceCreateFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR ) { return "{}"; } @@ -12681,7 +15221,8 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_XLIB_KHR enum class XlibSurfaceCreateFlagBitsKHR : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR ) { @@ -12690,12 +15231,12 @@ namespace VULKAN_HPP_NAMESPACE using XlibSurfaceCreateFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR ) { return "{}"; } #endif /*VK_USE_PLATFORM_XLIB_KHR*/ -} // namespace VULKAN_HPP_NAMESPACE +} // namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_NO_EXCEPTIONS namespace std @@ -12703,7 +15244,7 @@ namespace std template <> struct is_error_code_enum : public true_type {}; -} +} // namespace std #endif namespace VULKAN_HPP_NAMESPACE @@ -12711,308 +15252,364 @@ namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_NO_EXCEPTIONS class ErrorCategoryImpl : public std::error_category { - public: - virtual const char* name() const VULKAN_HPP_NOEXCEPT override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; } - virtual std::string message(int ev) const override { return to_string(static_cast(ev)); } + public: + virtual const char * name() const VULKAN_HPP_NOEXCEPT override + { + return VULKAN_HPP_NAMESPACE_STRING "::Result"; + } + virtual std::string message( int ev ) const override + { + return to_string( static_cast( ev ) ); + } }; class Error { - public: - Error() VULKAN_HPP_NOEXCEPT = default; - Error(const Error&) VULKAN_HPP_NOEXCEPT = default; - virtual ~Error() VULKAN_HPP_NOEXCEPT = default; + public: + Error() VULKAN_HPP_NOEXCEPT = default; + Error( const Error & ) VULKAN_HPP_NOEXCEPT = default; + virtual ~Error() VULKAN_HPP_NOEXCEPT = default; - virtual const char* what() const VULKAN_HPP_NOEXCEPT = 0; + virtual const char * what() const VULKAN_HPP_NOEXCEPT = 0; }; - class LogicError : public Error, public std::logic_error + class LogicError + : public Error + , public std::logic_error { - public: - explicit LogicError( const std::string& what ) - : Error(), std::logic_error(what) {} - explicit LogicError( char const * what ) - : Error(), std::logic_error(what) {} + public: + explicit LogicError( const std::string & what ) : Error(), std::logic_error( what ) {} + explicit LogicError( char const * what ) : Error(), std::logic_error( what ) {} - virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::logic_error::what(); } + virtual const char * what() const VULKAN_HPP_NOEXCEPT + { + return std::logic_error::what(); + } }; - class SystemError : public Error, public std::system_error + class SystemError + : 
public Error + , public std::system_error { - public: - SystemError( std::error_code ec ) - : Error(), std::system_error(ec) {} - SystemError( std::error_code ec, std::string const& what ) - : Error(), std::system_error(ec, what) {} - SystemError( std::error_code ec, char const * what ) - : Error(), std::system_error(ec, what) {} - SystemError( int ev, std::error_category const& ecat ) - : Error(), std::system_error(ev, ecat) {} - SystemError( int ev, std::error_category const& ecat, std::string const& what) - : Error(), std::system_error(ev, ecat, what) {} - SystemError( int ev, std::error_category const& ecat, char const * what) - : Error(), std::system_error(ev, ecat, what) {} + public: + SystemError( std::error_code ec ) : Error(), std::system_error( ec ) {} + SystemError( std::error_code ec, std::string const & what ) : Error(), std::system_error( ec, what ) {} + SystemError( std::error_code ec, char const * what ) : Error(), std::system_error( ec, what ) {} + SystemError( int ev, std::error_category const & ecat ) : Error(), std::system_error( ev, ecat ) {} + SystemError( int ev, std::error_category const & ecat, std::string const & what ) + : Error(), std::system_error( ev, ecat, what ) + {} + SystemError( int ev, std::error_category const & ecat, char const * what ) + : Error(), std::system_error( ev, ecat, what ) + {} - virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::system_error::what(); } + virtual const char * what() const VULKAN_HPP_NOEXCEPT + { + return std::system_error::what(); + } }; - VULKAN_HPP_INLINE const std::error_category& errorCategory() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE const std::error_category & errorCategory() VULKAN_HPP_NOEXCEPT { static ErrorCategoryImpl instance; return instance; } - VULKAN_HPP_INLINE std::error_code make_error_code(Result e) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE std::error_code make_error_code( Result e ) VULKAN_HPP_NOEXCEPT { - return std::error_code(static_cast(e), errorCategory()); + return std::error_code( static_cast( e ), errorCategory() ); } - VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE std::error_condition make_error_condition( Result e ) VULKAN_HPP_NOEXCEPT { - return std::error_condition(static_cast(e), errorCategory()); + return std::error_condition( static_cast( e ), errorCategory() ); } class OutOfHostMemoryError : public SystemError { public: - OutOfHostMemoryError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {} + OutOfHostMemoryError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) + {} OutOfHostMemoryError( char const * message ) - : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {} + : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) + {} }; class OutOfDeviceMemoryError : public SystemError { public: - OutOfDeviceMemoryError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {} + OutOfDeviceMemoryError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) + {} OutOfDeviceMemoryError( char const * message ) - : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {} + : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) + {} }; class InitializationFailedError : public SystemError { public: - 
InitializationFailedError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {} + InitializationFailedError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) + {} InitializationFailedError( char const * message ) - : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {} + : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) + {} }; class DeviceLostError : public SystemError { public: - DeviceLostError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} - DeviceLostError( char const * message ) - : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} + DeviceLostError( std::string const & message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) + {} + DeviceLostError( char const * message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} }; class MemoryMapFailedError : public SystemError { public: - MemoryMapFailedError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {} + MemoryMapFailedError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) + {} MemoryMapFailedError( char const * message ) - : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {} + : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) + {} }; class LayerNotPresentError : public SystemError { public: - LayerNotPresentError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} + LayerNotPresentError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) + {} LayerNotPresentError( char const * message ) - : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} + : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) + {} }; class ExtensionNotPresentError : public SystemError { public: - ExtensionNotPresentError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} + ExtensionNotPresentError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) + {} ExtensionNotPresentError( char const * message ) - : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} + : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) + {} }; class FeatureNotPresentError : public SystemError { public: - FeatureNotPresentError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} + FeatureNotPresentError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) + {} FeatureNotPresentError( char const * message ) - : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} + : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) + {} }; class IncompatibleDriverError : public SystemError { public: - IncompatibleDriverError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + IncompatibleDriverError( std::string const & message ) + : SystemError( make_error_code( 
Result::eErrorIncompatibleDriver ), message ) + {} IncompatibleDriverError( char const * message ) - : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) + {} }; class TooManyObjectsError : public SystemError { public: - TooManyObjectsError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + TooManyObjectsError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) + {} TooManyObjectsError( char const * message ) - : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) + {} }; class FormatNotSupportedError : public SystemError { public: - FormatNotSupportedError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + FormatNotSupportedError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) + {} FormatNotSupportedError( char const * message ) - : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) + {} }; class FragmentedPoolError : public SystemError { public: - FragmentedPoolError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + FragmentedPoolError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) + {} FragmentedPoolError( char const * message ) - : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) + {} }; class UnknownError : public SystemError { public: - UnknownError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} - UnknownError( char const * message ) - : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} + UnknownError( std::string const & message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} + UnknownError( char const * message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} }; class OutOfPoolMemoryError : public SystemError { public: - OutOfPoolMemoryError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + OutOfPoolMemoryError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) + {} OutOfPoolMemoryError( char const * message ) - : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) + {} }; class InvalidExternalHandleError : public SystemError { public: - InvalidExternalHandleError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + InvalidExternalHandleError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) + {} InvalidExternalHandleError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) + {} }; class FragmentationError : public 
SystemError { public: - FragmentationError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {} - FragmentationError( char const * message ) - : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {} + FragmentationError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFragmentation ), message ) + {} + FragmentationError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) + {} }; class InvalidOpaqueCaptureAddressError : public SystemError { public: - InvalidOpaqueCaptureAddressError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {} + InvalidOpaqueCaptureAddressError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) + {} InvalidOpaqueCaptureAddressError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {} + : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) + {} }; class SurfaceLostKHRError : public SystemError { public: - SurfaceLostKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + SurfaceLostKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) + {} SurfaceLostKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) + {} }; class NativeWindowInUseKHRError : public SystemError { public: - NativeWindowInUseKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + NativeWindowInUseKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) + {} NativeWindowInUseKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) + {} }; class OutOfDateKHRError : public SystemError { public: - OutOfDateKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} - OutOfDateKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + OutOfDateKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) + {} + OutOfDateKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} }; class IncompatibleDisplayKHRError : public SystemError { public: - IncompatibleDisplayKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + IncompatibleDisplayKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) + {} IncompatibleDisplayKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) + {} }; class ValidationFailedEXTError : public SystemError { public: - ValidationFailedEXTError( std::string const& message ) - : SystemError( 
make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + ValidationFailedEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) + {} ValidationFailedEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) + {} }; class InvalidShaderNVError : public SystemError { public: - InvalidShaderNVError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + InvalidShaderNVError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) + {} InvalidShaderNVError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) + {} }; class IncompatibleVersionKHRError : public SystemError { public: - IncompatibleVersionKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) {} + IncompatibleVersionKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) + {} IncompatibleVersionKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) {} + : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) + {} }; class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError { public: - InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {} + InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + {} InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {} + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + {} }; class NotPermittedEXTError : public SystemError { public: - NotPermittedEXTError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {} + NotPermittedEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) + {} NotPermittedEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {} + : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) + {} }; class FullScreenExclusiveModeLostEXTError : public SystemError { public: - FullScreenExclusiveModeLostEXTError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} + FullScreenExclusiveModeLostEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) + {} FullScreenExclusiveModeLostEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} + : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) + {} }; [[noreturn]] static void throwResultException( Result result, char const * 
message ) @@ -13043,7 +15640,8 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message ); case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message ); case Result::eErrorIncompatibleVersionKHR: throw IncompatibleVersionKHRError( message ); - case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message ); + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: + throw InvalidDrmFormatModifierPlaneLayoutEXTError( message ); case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message ); case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message ); default: throw SystemError( make_error_code( result ) ); @@ -13051,77 +15649,83 @@ namespace VULKAN_HPP_NAMESPACE } #endif - template void ignore(T const&) VULKAN_HPP_NOEXCEPT {} + template + void ignore( T const & ) VULKAN_HPP_NOEXCEPT + {} template struct ResultValue { #ifdef VULKAN_HPP_HAS_NOEXCEPT - ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(v))) + ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( v ) ) ) #else ResultValue( Result r, T & v ) #endif - : result( r ) - , value( v ) + : result( r ), value( v ) {} #ifdef VULKAN_HPP_HAS_NOEXCEPT - ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(std::move(v)))) + ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( std::move( v ) ) ) ) #else ResultValue( Result r, T && v ) #endif - : result( r ) - , value( std::move( v ) ) + : result( r ), value( std::move( v ) ) {} - Result result; - T value; + Result result; + T value; - operator std::tuple() VULKAN_HPP_NOEXCEPT { return std::tuple(result, value); } + operator std::tuple() VULKAN_HPP_NOEXCEPT + { + return std::tuple( result, value ); + } -#if !defined(VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST) - operator T const& () const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST ) + operator T const &() const VULKAN_HPP_NOEXCEPT { return value; } - operator T& () VULKAN_HPP_NOEXCEPT + operator T &() VULKAN_HPP_NOEXCEPT { return value; } #endif }; -#if !defined(VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST) +#if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST ) template - struct ResultValue> + struct ResultValue> { -#ifdef VULKAN_HPP_HAS_NOEXCEPT - ResultValue(Result r, UniqueHandle & v) VULKAN_HPP_NOEXCEPT -#else +# ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, UniqueHandle & v ) VULKAN_HPP_NOEXCEPT +# else ResultValue(Result r, UniqueHandle& v) -#endif - : result(r) - , value(v) +# endif + : result( r ) + , value( v ) {} -#ifdef VULKAN_HPP_HAS_NOEXCEPT - ResultValue(Result r, UniqueHandle && v) VULKAN_HPP_NOEXCEPT -#else +# ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, UniqueHandle && v ) VULKAN_HPP_NOEXCEPT +# else ResultValue(Result r, UniqueHandle && v) -#endif - : result(r) - , value(std::move(v)) +# endif + : result( r ) + , value( std::move( v ) ) {} - Result result; - UniqueHandle value; + Result result; + UniqueHandle value; - operator std::tuple&>() VULKAN_HPP_NOEXCEPT { return std::tuple&>(result, value); } + operator std::tuple &>() VULKAN_HPP_NOEXCEPT + { + return std::tuple &>( result, value ); + } operator UniqueHandle() VULKAN_HPP_NOEXCEPT { - return std::move(value); + return std::move( value ); } }; #endif @@ -13130,9 +15734,9 @@ namespace VULKAN_HPP_NAMESPACE struct ResultValueType { #ifdef 
VULKAN_HPP_NO_EXCEPTIONS - typedef ResultValue type; + typedef ResultValue type; #else - typedef T type; + typedef T type; #endif }; @@ -13142,14 +15746,14 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VULKAN_HPP_NO_EXCEPTIONS typedef Result type; #else - typedef void type; + typedef void type; #endif }; VULKAN_HPP_INLINE ResultValueType::type createResultValue( Result result, char const * message ) { #ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); + ignore( message ); VULKAN_HPP_ASSERT( result == Result::eSuccess ); return result; #else @@ -13164,7 +15768,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE typename ResultValueType::type createResultValue( Result result, T & data, char const * message ) { #ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); + ignore( message ); VULKAN_HPP_ASSERT( result == Result::eSuccess ); return ResultValue( result, std::move( data ) ); #else @@ -13176,10 +15780,12 @@ namespace VULKAN_HPP_NAMESPACE #endif } - VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list successCodes ) + VULKAN_HPP_INLINE Result createResultValue( Result result, + char const * message, + std::initializer_list successCodes ) { #ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); + ignore( message ); VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); #else if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) @@ -13191,10 +15797,11 @@ namespace VULKAN_HPP_NAMESPACE } template - VULKAN_HPP_INLINE ResultValue createResultValue( Result result, T & data, char const * message, std::initializer_list successCodes ) + VULKAN_HPP_INLINE ResultValue + createResultValue( Result result, T & data, char const * message, std::initializer_list successCodes ) { #ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); + ignore( message ); VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); #else if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) @@ -13207,35 +15814,41 @@ namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type createResultValue( Result result, T & data, char const * message, typename UniqueHandleTraits::deleter const& deleter ) + VULKAN_HPP_INLINE typename ResultValueType>::type createResultValue( + Result result, T & data, char const * message, typename UniqueHandleTraits::deleter const & deleter ) { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); VULKAN_HPP_ASSERT( result == Result::eSuccess ); - return ResultValue>( result, UniqueHandle(data, deleter) ); -#else + return ResultValue>( result, UniqueHandle( data, deleter ) ); +# else if ( result != Result::eSuccess ) { throwResultException( result, message ); } - return UniqueHandle(data, deleter); -#endif + return UniqueHandle( data, deleter ); +# endif } template - VULKAN_HPP_INLINE ResultValue> createResultValue( Result result, T & data, char const * message, std::initializer_list successCodes, typename UniqueHandleTraits::deleter const& deleter ) + VULKAN_HPP_INLINE ResultValue> + createResultValue( Result result, + T & data, + char const * message, + std::initializer_list successCodes, + typename UniqueHandleTraits::deleter const & deleter ) { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); VULKAN_HPP_ASSERT( std::find( 
successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); - return ResultValue>( result, UniqueHandle(data, deleter) ); -#else + return ResultValue>( result, UniqueHandle( data, deleter ) ); +# else if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) { throwResultException( result, message ); } - return ResultValue>( result, UniqueHandle(data, deleter) ); -#endif + return ResultValue>( result, UniqueHandle( data, deleter ) ); +# endif } #endif @@ -13764,7 +16377,7 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceShaderDrawParametersFeatures; using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; struct PhysicalDeviceShaderFloat16Int8Features; - using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; struct PhysicalDeviceShaderImageFootprintFeaturesNV; struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; @@ -13794,8 +16407,8 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceUniformBufferStandardLayoutFeatures; using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; struct PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT; struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT; @@ -14031,20 +16644,15 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSurfaceKHR; public: - VULKAN_HPP_CONSTEXPR SurfaceKHR() VULKAN_HPP_NOEXCEPT - : m_surfaceKHR(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR SurfaceKHR() VULKAN_HPP_NOEXCEPT : m_surfaceKHR( VK_NULL_HANDLE ) {} + + VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( VK_NULL_HANDLE ) {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( surfaceKHR ) {} - VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_surfaceKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT - : m_surfaceKHR( surfaceKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + SurfaceKHR & operator=( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT { m_surfaceKHR = surfaceKHR; return *this; @@ -14057,20 +16665,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceKHR const & ) const = default; #else bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_surfaceKHR == rhs.m_surfaceKHR; } - bool operator!=(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceKHR 
const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_surfaceKHR != rhs.m_surfaceKHR; } - bool operator<(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_surfaceKHR < rhs.m_surfaceKHR; } @@ -14110,20 +16718,19 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugReportCallbackEXT; public: - VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT - : m_debugReportCallbackEXT(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT : m_debugReportCallbackEXT( VK_NULL_HANDLE ) {} VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_debugReportCallbackEXT(VK_NULL_HANDLE) + : m_debugReportCallbackEXT( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT + DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT : m_debugReportCallbackEXT( debugReportCallbackEXT ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DebugReportCallbackEXT & operator=( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT { m_debugReportCallbackEXT = debugReportCallbackEXT; return *this; @@ -14136,20 +16743,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugReportCallbackEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugReportCallbackEXT const & ) const = default; #else bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT; } - bool operator!=(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT; } - bool operator<(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT; } @@ -14173,7 +16780,8 @@ namespace VULKAN_HPP_NAMESPACE private: VkDebugReportCallbackEXT m_debugReportCallbackEXT; }; - static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" ); + static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), + "handle and wrapper have different size!" 
); template <> struct cpp_type @@ -14189,20 +16797,19 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugUtilsMessengerEXT; public: - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT - : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT : m_debugUtilsMessengerEXT( VK_NULL_HANDLE ) {} VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) + : m_debugUtilsMessengerEXT( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT + DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DebugUtilsMessengerEXT & operator=( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT { m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; return *this; @@ -14215,20 +16822,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugUtilsMessengerEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsMessengerEXT const & ) const = default; #else bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT; } - bool operator!=(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT; } - bool operator<(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT; } @@ -14252,7 +16859,8 @@ namespace VULKAN_HPP_NAMESPACE private: VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT; }; - static_assert( sizeof( DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" ); + static_assert( sizeof( DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), + "handle and wrapper have different size!" 
); template <> struct cpp_type @@ -14268,20 +16876,15 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayKHR; public: - VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT - : m_displayKHR(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT : m_displayKHR( VK_NULL_HANDLE ) {} + + VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_displayKHR( VK_NULL_HANDLE ) {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT : m_displayKHR( displayKHR ) {} - VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_displayKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT - : m_displayKHR( displayKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DisplayKHR & operator=( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT { m_displayKHR = displayKHR; return *this; @@ -14294,20 +16897,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayKHR const & ) const = default; #else bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_displayKHR == rhs.m_displayKHR; } - bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_displayKHR != rhs.m_displayKHR; } - bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_displayKHR < rhs.m_displayKHR; } @@ -14347,20 +16950,16 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSwapchainKHR; public: - VULKAN_HPP_CONSTEXPR SwapchainKHR() VULKAN_HPP_NOEXCEPT - : m_swapchainKHR(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR SwapchainKHR() VULKAN_HPP_NOEXCEPT : m_swapchainKHR( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_swapchainKHR(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_swapchainKHR( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT : m_swapchainKHR( swapchainKHR ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + SwapchainKHR & operator=( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT { m_swapchainKHR = swapchainKHR; return *this; @@ -14373,20 +16972,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SwapchainKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainKHR const & ) const = default; #else bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_swapchainKHR == rhs.m_swapchainKHR; } - bool operator!=(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_swapchainKHR != rhs.m_swapchainKHR; } - bool operator<(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT 
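// --- usage sketch (illustration only, not emitted by the generator) ---------------------------
// The handle wrappers above (SurfaceKHR, SwapchainKHR, ...) are thin value types around the C
// handles: default- and nullptr-construction yield VK_NULL_HANDLE, and operator==/!=/< (or <=>)
// compare the raw handle, so the wrappers work as keys in ordered containers. The cast back to
// the C handle assumes the usual VULKAN_HPP_TYPESAFE_EXPLICIT conversion operator that
// vulkan.hpp defines alongside the members shown here.
#include <cassert>
#include <cstdint>
#include <map>
#include <vulkan/vulkan.hpp>

void swapchainHandleSketch( vk::SwapchainKHR swapchain )
{
  assert( vk::SwapchainKHR() == vk::SwapchainKHR( nullptr ) );    // both hold VK_NULL_HANDLE
  std::map<vk::SwapchainKHR, uint32_t> imageCounts;               // ordering via operator< / <=>
  imageCounts[swapchain] = 0;
  VkSwapchainKHR raw = static_cast<VkSwapchainKHR>( swapchain );  // hand the C handle to C APIs
  (void)raw;
}
// -----------------------------------------------------------------------------------------------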
+ bool operator<( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_swapchainKHR < rhs.m_swapchainKHR; } @@ -14426,20 +17025,14 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSemaphore; public: - VULKAN_HPP_CONSTEXPR Semaphore() VULKAN_HPP_NOEXCEPT - : m_semaphore(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Semaphore() VULKAN_HPP_NOEXCEPT : m_semaphore( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_semaphore(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_semaphore( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT - : m_semaphore( semaphore ) - {} + VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT : m_semaphore( semaphore ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Semaphore & operator=(VkSemaphore semaphore) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Semaphore & operator=( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT { m_semaphore = semaphore; return *this; @@ -14452,20 +17045,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Semaphore const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Semaphore const & ) const = default; #else bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_semaphore == rhs.m_semaphore; } - bool operator!=(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_semaphore != rhs.m_semaphore; } - bool operator<(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_semaphore < rhs.m_semaphore; } @@ -14505,20 +17098,14 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFence; public: - VULKAN_HPP_CONSTEXPR Fence() VULKAN_HPP_NOEXCEPT - : m_fence(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Fence() VULKAN_HPP_NOEXCEPT : m_fence( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_fence(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_fence( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT - : m_fence( fence ) - {} + VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT : m_fence( fence ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Fence & operator=(VkFence fence) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Fence & operator=( VkFence fence ) VULKAN_HPP_NOEXCEPT { m_fence = fence; return *this; @@ -14531,20 +17118,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Fence const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Fence const & ) const = default; #else bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_fence == rhs.m_fence; } - bool operator!=(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_fence != rhs.m_fence; } - bool operator<(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
m_fence < rhs.m_fence; } @@ -14585,19 +17172,21 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT - : m_performanceConfigurationINTEL(VK_NULL_HANDLE) + : m_performanceConfigurationINTEL( VK_NULL_HANDLE ) {} VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_performanceConfigurationINTEL(VK_NULL_HANDLE) + : m_performanceConfigurationINTEL( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT + PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PerformanceConfigurationINTEL & + operator=( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT { m_performanceConfigurationINTEL = performanceConfigurationINTEL; return *this; @@ -14610,20 +17199,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceConfigurationINTEL const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceConfigurationINTEL const & ) const = default; #else bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL; } - bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL; } - bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL; } @@ -14647,7 +17236,8 @@ namespace VULKAN_HPP_NAMESPACE private: VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL; }; - static_assert( sizeof( PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" ); + static_assert( sizeof( PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), + "handle and wrapper have different size!" 
); template <> struct cpp_type @@ -14663,20 +17253,14 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueryPool; public: - VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT - : m_queryPool(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT : m_queryPool( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_queryPool(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_queryPool( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT - : m_queryPool( queryPool ) - {} + VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT : m_queryPool( queryPool ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + QueryPool & operator=( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT { m_queryPool = queryPool; return *this; @@ -14689,20 +17273,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueryPool const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPool const & ) const = default; #else bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_queryPool == rhs.m_queryPool; } - bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_queryPool != rhs.m_queryPool; } - bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_queryPool < rhs.m_queryPool; } @@ -14742,20 +17326,14 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBuffer; public: - VULKAN_HPP_CONSTEXPR Buffer() VULKAN_HPP_NOEXCEPT - : m_buffer(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Buffer() VULKAN_HPP_NOEXCEPT : m_buffer( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_buffer(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_buffer( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT - : m_buffer( buffer ) - {} + VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT : m_buffer( buffer ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Buffer & operator=(VkBuffer buffer) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Buffer & operator=( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT { m_buffer = buffer; return *this; @@ -14768,20 +17346,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Buffer const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Buffer const & ) const = default; #else bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_buffer == rhs.m_buffer; } - bool operator!=(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_buffer != rhs.m_buffer; } - bool operator<(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_buffer < rhs.m_buffer; } @@ -14821,20 +17399,16 @@ 
namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineLayout; public: - VULKAN_HPP_CONSTEXPR PipelineLayout() VULKAN_HPP_NOEXCEPT - : m_pipelineLayout(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR PipelineLayout() VULKAN_HPP_NOEXCEPT : m_pipelineLayout( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_pipelineLayout(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_pipelineLayout( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT : m_pipelineLayout( pipelineLayout ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PipelineLayout & operator=(VkPipelineLayout pipelineLayout) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PipelineLayout & operator=( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT { m_pipelineLayout = pipelineLayout; return *this; @@ -14847,20 +17421,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineLayout const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLayout const & ) const = default; #else bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_pipelineLayout == rhs.m_pipelineLayout; } - bool operator!=(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_pipelineLayout != rhs.m_pipelineLayout; } - bool operator<(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_pipelineLayout < rhs.m_pipelineLayout; } @@ -14900,20 +17474,16 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSet; public: - VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT - : m_descriptorSet(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT : m_descriptorSet( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_descriptorSet(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_descriptorSet( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT : m_descriptorSet( descriptorSet ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorSet & operator=( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT { m_descriptorSet = descriptorSet; return *this; @@ -14926,20 +17496,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSet const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSet const & ) const = default; #else bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_descriptorSet == rhs.m_descriptorSet; } - bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_descriptorSet != rhs.m_descriptorSet; } - bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + bool 
operator<( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_descriptorSet < rhs.m_descriptorSet; } @@ -14979,20 +17549,14 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipeline; public: - VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT - : m_pipeline(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT : m_pipeline( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_pipeline(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_pipeline( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT - : m_pipeline( pipeline ) - {} + VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT : m_pipeline( pipeline ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Pipeline & operator=( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT { m_pipeline = pipeline; return *this; @@ -15005,20 +17569,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Pipeline const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Pipeline const & ) const = default; #else bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_pipeline == rhs.m_pipeline; } - bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_pipeline != rhs.m_pipeline; } - bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_pipeline < rhs.m_pipeline; } @@ -15058,20 +17622,14 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImageView; public: - VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT - : m_imageView(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT : m_imageView( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_imageView(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_imageView( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT - : m_imageView( imageView ) - {} + VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT : m_imageView( imageView ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ImageView & operator=( VkImageView imageView ) VULKAN_HPP_NOEXCEPT { m_imageView = imageView; return *this; @@ -15084,20 +17642,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageView const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageView const & ) const = default; #else bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_imageView == rhs.m_imageView; } - bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_imageView != rhs.m_imageView; } - bool operator<(ImageView const & rhs ) 
const VULKAN_HPP_NOEXCEPT + bool operator<( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_imageView < rhs.m_imageView; } @@ -15137,20 +17695,14 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImage; public: - VULKAN_HPP_CONSTEXPR Image() VULKAN_HPP_NOEXCEPT - : m_image(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Image() VULKAN_HPP_NOEXCEPT : m_image( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_image(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_image( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT - : m_image( image ) - {} + VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT : m_image( image ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Image & operator=(VkImage image) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Image & operator=( VkImage image ) VULKAN_HPP_NOEXCEPT { m_image = image; return *this; @@ -15163,20 +17715,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Image const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Image const & ) const = default; #else bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_image == rhs.m_image; } - bool operator!=(Image const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Image const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_image != rhs.m_image; } - bool operator<(Image const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( Image const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_image < rhs.m_image; } @@ -15216,20 +17768,20 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eAccelerationStructureKHR; public: - VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() VULKAN_HPP_NOEXCEPT - : m_accelerationStructureKHR(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() VULKAN_HPP_NOEXCEPT : m_accelerationStructureKHR( VK_NULL_HANDLE ) {} VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_accelerationStructureKHR(VK_NULL_HANDLE) + : m_accelerationStructureKHR( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT + AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT : m_accelerationStructureKHR( accelerationStructureKHR ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - AccelerationStructureKHR & operator=(VkAccelerationStructureKHR accelerationStructureKHR) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + AccelerationStructureKHR & operator=( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT { m_accelerationStructureKHR = accelerationStructureKHR; return *this; @@ -15242,20 +17794,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureKHR const & ) const = default; #else bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR; 
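// --- sketch (illustration only) ----------------------------------------------------------------
// The cpp_type<> specializations that follow each handle (visible above for
// DebugReportCallbackEXT and DebugUtilsMessengerEXT, and below for AccelerationStructureKHR)
// map an ObjectType enumerant back to its C++ wrapper type at compile time. The member name
// (type) is assumed from the usual vulkan.hpp definition; the specialization bodies themselves
// fall outside the hunks shown in this patch.
#include <type_traits>
#include <vulkan/vulkan.hpp>

static_assert( std::is_same<vk::cpp_type<vk::ObjectType::eImage>::type, vk::Image>::value,
               "cpp_type recovers the wrapper type from the ObjectType enum" );
// -----------------------------------------------------------------------------------------------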
} - bool operator!=(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR; } - bool operator<(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR; } @@ -15279,7 +17831,8 @@ namespace VULKAN_HPP_NAMESPACE private: VkAccelerationStructureKHR m_accelerationStructureKHR; }; - static_assert( sizeof( AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), "handle and wrapper have different size!" ); + static_assert( sizeof( AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), + "handle and wrapper have different size!" ); template <> struct cpp_type @@ -15296,20 +17849,20 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorUpdateTemplate; public: - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT - : m_descriptorUpdateTemplate(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT : m_descriptorUpdateTemplate( VK_NULL_HANDLE ) {} VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_descriptorUpdateTemplate(VK_NULL_HANDLE) + : m_descriptorUpdateTemplate( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT + DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorUpdateTemplate & operator=( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT { m_descriptorUpdateTemplate = descriptorUpdateTemplate; return *this; @@ -15322,20 +17875,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorUpdateTemplate const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplate const & ) const = default; #else bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate; } - bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate; } - bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate; } @@ -15359,7 +17912,8 @@ namespace VULKAN_HPP_NAMESPACE private: VkDescriptorUpdateTemplate m_descriptorUpdateTemplate; }; - static_assert( sizeof( DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" ); + static_assert( sizeof( DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), + "handle and wrapper have different size!" 
); template <> struct cpp_type @@ -15376,20 +17930,14 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eEvent; public: - VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT - : m_event(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT : m_event( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_event(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_event( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT - : m_event( event ) - {} + VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT : m_event( event ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Event & operator=( VkEvent event ) VULKAN_HPP_NOEXCEPT { m_event = event; return *this; @@ -15402,20 +17950,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Event const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Event const & ) const = default; #else bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_event == rhs.m_event; } - bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Event const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_event != rhs.m_event; } - bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( Event const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_event < rhs.m_event; } @@ -15455,20 +18003,16 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandBuffer; public: - VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT - : m_commandBuffer(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT : m_commandBuffer( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_commandBuffer(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_commandBuffer( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT : m_commandBuffer( commandBuffer ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CommandBuffer & operator=( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT { m_commandBuffer = commandBuffer; return *this; @@ -15481,601 +18025,1147 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandBuffer const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBuffer const & ) const = default; #else bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_commandBuffer == rhs.m_commandBuffer; } - bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_commandBuffer != rhs.m_commandBuffer; } - bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_commandBuffer < rhs.m_commandBuffer; } #endif - template - Result begin( const 
VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type begin( const CommandBufferBeginInfo & beginInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; 
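// --- usage sketch (illustration only) ----------------------------------------------------------
// How the two overload families declared here are used: the pointer forms mirror the C API,
// while the enhanced-mode reference forms take the structs by const reference and report errors
// through the ResultValueType machinery earlier in this file, i.e. they throw one of the
// SystemError subclasses above unless VULKAN_HPP_NO_EXCEPTIONS is defined, in which case
// begin()/end() hand back a vk::Result instead. The handles passed in are placeholders created
// elsewhere.
#include <array>
#include <vulkan/vulkan.hpp>

void recordSketch( vk::CommandBuffer commandBuffer,
                   vk::RenderPass    renderPass,
                   vk::Framebuffer   framebuffer,
                   vk::Extent2D      extent )
{
  try
  {
    commandBuffer.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
  }
  catch ( vk::SystemError const & /*err*/ )  // one of the error classes defined above
  {
    throw;  // or handle locally
  }

  vk::ClearValue clearValue;
  clearValue.color = vk::ClearColorValue( std::array<float, 4>( { { 0.2f, 0.2f, 0.2f, 0.2f } } ) );
  vk::RenderPassBeginInfo renderPassBeginInfo(
    renderPass, framebuffer, vk::Rect2D( vk::Offset2D( 0, 0 ), extent ), 1, &clearValue );

  commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );  // reference overload
  commandBuffer.endRenderPass();
  commandBuffer.end();
}
// -----------------------------------------------------------------------------------------------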
+ template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy counterBuffers, + ArrayProxy counterBufferOffsets, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + ArrayProxy descriptorSets, + ArrayProxy dynamicOffsets, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void + bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, ArrayProxy sizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + ArrayProxy buffers, + ArrayProxy offsets, + ArrayProxy sizes, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + void bindVertexBuffers( uint32_t firstBinding, + ArrayProxy buffers, + ArrayProxy offsets, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy regions, + 
VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template + void buildAccelerationStructureIndirectKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfo, + VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, + uint32_t indirectStride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, + VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, + uint32_t indirectStride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void buildAccelerationStructureKHR( ArrayProxy infos, ArrayProxy pOffsetInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template + void buildAccelerationStructureKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR * const * ppOffsetInfos, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureKHR( + ArrayProxy infos, + ArrayProxy pOffsetInfos, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void 
buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void clearAttachments( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, + uint32_t rectCount, + const VULKAN_HPP_NAMESPACE::ClearRect * pRects, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void clearAttachments( ArrayProxy attachments, + ArrayProxy rects, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void clearColorImage( 
VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const ClearColorValue & color, + ArrayProxy ranges, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const ClearDepthStencilValue & depthStencil, + ArrayProxy ranges, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void 
copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template + void copyAccelerationStructureToMemoryKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy regions, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy regions, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + 
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy regions, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy regions, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template + void copyMemoryToAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void debugMarkerEndEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void debugMarkerEndEXT( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void dispatchBaseKHR( uint32_t 
baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void draw( uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t 
stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const 
VULKAN_HPP_NOEXCEPT; - template - void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void drawMeshTasksNV( uint32_t taskCount, + uint32_t firstTask, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void endConditionalRenderingEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void endConditionalRenderingEXT( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void endDebugUtilsLabelEXT( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + uint32_t index, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void endRenderPass(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void endRenderPass( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void endRenderPass2( const SubpassEndInfo & 
subpassEndInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy counterBuffers, + ArrayProxy counterBufferOffsets, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void executeCommands( uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void executeCommands( ArrayProxy commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void executeCommands( ArrayProxy commandBuffers, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void 
executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + 
void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + ArrayProxy memoryBarriers, + ArrayProxy bufferMemoryBarriers, + ArrayProxy imageMemoryBarriers, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + 
const void * pValues, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy values, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + ArrayProxy values, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy descriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + ArrayProxy descriptorWrites, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void + pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, 
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy regions, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setBlendConstants( const float blendConstants[4], Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setBlendConstants( const float blendConstants[4], + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setCheckpointNV( const void * pCheckpointMarker, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy customSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + ArrayProxy customSampleOrders, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + 
void setDepthBounds( float minDepthBounds, + float maxDepthBounds, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void setDeviceMask( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setDeviceMask( uint32_t deviceMask, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setDeviceMaskKHR( uint32_t deviceMask, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy discardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + ArrayProxy discardRectangles, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy exclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + ArrayProxy exclusiveScissors, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setLineStippleEXT( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void setLineWidth( float lineWidth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setLineWidth( float lineWidth, Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) 
const VULKAN_HPP_NOEXCEPT; - template - Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result + setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* 
pScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setScissor( uint32_t firstScissor, + uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setScissor( uint32_t firstScissor, ArrayProxy scissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setScissor( uint32_t firstScissor, + ArrayProxy scissors, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setViewport( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewport( uint32_t firstViewport, ArrayProxy viewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setViewport( uint32_t firstViewport, + ArrayProxy viewports, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void + setViewportShadingRatePaletteNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy shadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void + setViewportShadingRatePaletteNV( uint32_t firstViewport, + ArrayProxy shadingRatePalettes, + Dispatch const & d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy viewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void setViewportWScalingNV( uint32_t firstViewport, + ArrayProxy viewportWScalings, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pCallableShaderBindingTable, + VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, + const StridedBufferRegionKHR & missShaderBindingTable, + const StridedBufferRegionKHR & hitShaderBindingTable, + const StridedBufferRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* 
pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, + const StridedBufferRegionKHR & missShaderBindingTable, + const StridedBufferRegionKHR & hitShaderBindingTable, + const StridedBufferRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize dataSize, + const void * pData, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + ArrayProxy data, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void waitEvents( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void waitEvents( ArrayProxy events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void waitEvents( ArrayProxy events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + ArrayProxy memoryBarriers, + ArrayProxy bufferMemoryBarriers, + ArrayProxy imageMemoryBarriers, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void writeAccelerationStructuresPropertiesKHR( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void writeAccelerationStructuresPropertiesKHR( + ArrayProxy accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void writeAccelerationStructuresPropertiesNV( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void writeAccelerationStructuresPropertiesNV( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void writeAccelerationStructuresPropertiesNV( + ArrayProxy accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result end( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #else - template - typename ResultValueType::type end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type end( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #else - template - typename ResultValueType::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT @@ -16112,20 +19202,16 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDeviceMemory; public: - VULKAN_HPP_CONSTEXPR DeviceMemory() VULKAN_HPP_NOEXCEPT - : m_deviceMemory(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR DeviceMemory() VULKAN_HPP_NOEXCEPT : m_deviceMemory( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_deviceMemory(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_deviceMemory( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT : m_deviceMemory( deviceMemory ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DeviceMemory & operator=(VkDeviceMemory deviceMemory) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DeviceMemory & operator=( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT { m_deviceMemory = deviceMemory; return *this; @@ -16138,20 +19224,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceMemory const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemory const & ) const = default; #else - bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_deviceMemory == rhs.m_deviceMemory; } - bool operator!=(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_deviceMemory != rhs.m_deviceMemory; } - bool operator<(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_deviceMemory < rhs.m_deviceMemory; } @@ -16191,20 +19277,15 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBufferView; public: - VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT - : m_bufferView(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT : m_bufferView( VK_NULL_HANDLE ) {} + + VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_bufferView( VK_NULL_HANDLE ) {} + + VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT : m_bufferView( bufferView ) {} - VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_bufferView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT - : m_bufferView( bufferView ) - {} - -#if 
defined(VULKAN_HPP_TYPESAFE_CONVERSION) - BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + BufferView & operator=( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT { m_bufferView = bufferView; return *this; @@ -16217,20 +19298,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferView const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferView const & ) const = default; #else bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_bufferView == rhs.m_bufferView; } - bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_bufferView != rhs.m_bufferView; } - bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_bufferView < rhs.m_bufferView; } @@ -16270,20 +19351,16 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandPool; public: - VULKAN_HPP_CONSTEXPR CommandPool() VULKAN_HPP_NOEXCEPT - : m_commandPool(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR CommandPool() VULKAN_HPP_NOEXCEPT : m_commandPool( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_commandPool(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_commandPool( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT : m_commandPool( commandPool ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - CommandPool & operator=(VkCommandPool commandPool) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CommandPool & operator=( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT { m_commandPool = commandPool; return *this; @@ -16296,20 +19373,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandPool const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandPool const & ) const = default; #else bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_commandPool == rhs.m_commandPool; } - bool operator!=(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_commandPool != rhs.m_commandPool; } - bool operator<(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_commandPool < rhs.m_commandPool; } @@ -16349,20 +19426,16 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineCache; public: - VULKAN_HPP_CONSTEXPR PipelineCache() VULKAN_HPP_NOEXCEPT - : m_pipelineCache(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR PipelineCache() VULKAN_HPP_NOEXCEPT : m_pipelineCache( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_pipelineCache(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_pipelineCache( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT : m_pipelineCache( pipelineCache ) {} -#if 
defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PipelineCache & operator=(VkPipelineCache pipelineCache) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PipelineCache & operator=( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT { m_pipelineCache = pipelineCache; return *this; @@ -16375,20 +19448,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCache const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCache const & ) const = default; #else bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_pipelineCache == rhs.m_pipelineCache; } - bool operator!=(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_pipelineCache != rhs.m_pipelineCache; } - bool operator<(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_pipelineCache < rhs.m_pipelineCache; } @@ -16429,25 +19502,23 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDeferredOperationKHR; public: - VULKAN_HPP_CONSTEXPR DeferredOperationKHR() VULKAN_HPP_NOEXCEPT - : m_deferredOperationKHR(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR DeferredOperationKHR() VULKAN_HPP_NOEXCEPT : m_deferredOperationKHR( VK_NULL_HANDLE ) {} VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_deferredOperationKHR(VK_NULL_HANDLE) + : m_deferredOperationKHR( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT : m_deferredOperationKHR( deferredOperationKHR ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DeferredOperationKHR & operator=(VkDeferredOperationKHR deferredOperationKHR) VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DeferredOperationKHR & operator=( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT { m_deferredOperationKHR = deferredOperationKHR; return *this; } -#endif +# endif DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { @@ -16455,24 +19526,24 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeferredOperationKHR const& ) const = default; -#else +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeferredOperationKHR const & ) const = default; +# else bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_deferredOperationKHR == rhs.m_deferredOperationKHR; } - bool operator!=(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_deferredOperationKHR != rhs.m_deferredOperationKHR; } - bool operator<(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_deferredOperationKHR < rhs.m_deferredOperationKHR; } -#endif +# endif VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT { @@ -16492,7 +19563,8 @@ namespace VULKAN_HPP_NAMESPACE private: VkDeferredOperationKHR m_deferredOperationKHR; }; - static_assert( sizeof( DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), "handle and wrapper have different size!" 
); + static_assert( sizeof( DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), + "handle and wrapper have different size!" ); template <> struct cpp_type @@ -16509,20 +19581,16 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorPool; public: - VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT - : m_descriptorPool(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT : m_descriptorPool( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_descriptorPool(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_descriptorPool( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT : m_descriptorPool( descriptorPool ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorPool & operator=( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT { m_descriptorPool = descriptorPool; return *this; @@ -16535,20 +19603,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorPool const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPool const & ) const = default; #else bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_descriptorPool == rhs.m_descriptorPool; } - bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_descriptorPool != rhs.m_descriptorPool; } - bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_descriptorPool < rhs.m_descriptorPool; } @@ -16588,20 +19656,18 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSetLayout; public: - VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT - : m_descriptorSetLayout(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT : m_descriptorSetLayout( VK_NULL_HANDLE ) {} VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_descriptorSetLayout(VK_NULL_HANDLE) + : m_descriptorSetLayout( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT : m_descriptorSetLayout( descriptorSetLayout ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorSetLayout & operator=( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT { m_descriptorSetLayout = descriptorSetLayout; return *this; @@ -16614,20 +19680,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetLayout const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayout const & ) const = default; #else bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_descriptorSetLayout == rhs.m_descriptorSetLayout; } - bool 
operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_descriptorSetLayout != rhs.m_descriptorSetLayout; } - bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_descriptorSetLayout < rhs.m_descriptorSetLayout; } @@ -16651,7 +19717,8 @@ namespace VULKAN_HPP_NAMESPACE private: VkDescriptorSetLayout m_descriptorSetLayout; }; - static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" ); + static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), + "handle and wrapper have different size!" ); template <> struct cpp_type @@ -16667,20 +19734,16 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFramebuffer; public: - VULKAN_HPP_CONSTEXPR Framebuffer() VULKAN_HPP_NOEXCEPT - : m_framebuffer(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Framebuffer() VULKAN_HPP_NOEXCEPT : m_framebuffer( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_framebuffer(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_framebuffer( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT : m_framebuffer( framebuffer ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Framebuffer & operator=(VkFramebuffer framebuffer) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Framebuffer & operator=( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT { m_framebuffer = framebuffer; return *this; @@ -16693,20 +19756,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Framebuffer const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Framebuffer const & ) const = default; #else bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_framebuffer == rhs.m_framebuffer; } - bool operator!=(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_framebuffer != rhs.m_framebuffer; } - bool operator<(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_framebuffer < rhs.m_framebuffer; } @@ -16746,20 +19809,20 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eIndirectCommandsLayoutNV; public: - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT - : m_indirectCommandsLayoutNV(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT : m_indirectCommandsLayoutNV( VK_NULL_HANDLE ) {} VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_indirectCommandsLayoutNV(VK_NULL_HANDLE) + : m_indirectCommandsLayoutNV( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT + IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - 
IndirectCommandsLayoutNV & operator=(VkIndirectCommandsLayoutNV indirectCommandsLayoutNV) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + IndirectCommandsLayoutNV & operator=( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT { m_indirectCommandsLayoutNV = indirectCommandsLayoutNV; return *this; @@ -16772,20 +19835,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IndirectCommandsLayoutNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutNV const & ) const = default; #else bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV; } - bool operator!=(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV; } - bool operator<(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV; } @@ -16809,7 +19872,8 @@ namespace VULKAN_HPP_NAMESPACE private: VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV; }; - static_assert( sizeof( IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" ); + static_assert( sizeof( IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), + "handle and wrapper have different size!" ); template <> struct cpp_type @@ -16825,20 +19889,15 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eRenderPass; public: - VULKAN_HPP_CONSTEXPR RenderPass() VULKAN_HPP_NOEXCEPT - : m_renderPass(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR RenderPass() VULKAN_HPP_NOEXCEPT : m_renderPass( VK_NULL_HANDLE ) {} + + VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_renderPass( VK_NULL_HANDLE ) {} + + VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT : m_renderPass( renderPass ) {} - VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_renderPass(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT - : m_renderPass( renderPass ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - RenderPass & operator=(VkRenderPass renderPass) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + RenderPass & operator=( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT { m_renderPass = renderPass; return *this; @@ -16851,20 +19910,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RenderPass const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPass const & ) const = default; #else bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_renderPass == rhs.m_renderPass; } - bool operator!=(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_renderPass != rhs.m_renderPass; } - bool operator<(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_renderPass < 
rhs.m_renderPass; } @@ -16904,20 +19963,14 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSampler; public: - VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT - : m_sampler(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT : m_sampler( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_sampler(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_sampler( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT - : m_sampler( sampler ) - {} + VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT : m_sampler( sampler ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Sampler & operator=( VkSampler sampler ) VULKAN_HPP_NOEXCEPT { m_sampler = sampler; return *this; @@ -16930,20 +19983,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Sampler const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Sampler const & ) const = default; #else bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_sampler == rhs.m_sampler; } - bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_sampler != rhs.m_sampler; } - bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_sampler < rhs.m_sampler; } @@ -16983,20 +20036,19 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSamplerYcbcrConversion; public: - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT - : m_samplerYcbcrConversion(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT : m_samplerYcbcrConversion( VK_NULL_HANDLE ) {} VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_samplerYcbcrConversion(VK_NULL_HANDLE) + : m_samplerYcbcrConversion( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT + SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT : m_samplerYcbcrConversion( samplerYcbcrConversion ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + SamplerYcbcrConversion & operator=( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT { m_samplerYcbcrConversion = samplerYcbcrConversion; return *this; @@ -17009,20 +20061,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SamplerYcbcrConversion const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversion const & ) const = default; #else bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion; } - bool operator!=(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + 
bool operator!=( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion; } - bool operator<(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion; } @@ -17046,7 +20098,8 @@ namespace VULKAN_HPP_NAMESPACE private: VkSamplerYcbcrConversion m_samplerYcbcrConversion; }; - static_assert( sizeof( SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" ); + static_assert( sizeof( SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), + "handle and wrapper have different size!" ); template <> struct cpp_type @@ -17063,20 +20116,16 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eShaderModule; public: - VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT - : m_shaderModule(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT : m_shaderModule( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_shaderModule(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_shaderModule( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT : m_shaderModule( shaderModule ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ShaderModule & operator=( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT { m_shaderModule = shaderModule; return *this; @@ -17089,20 +20138,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ShaderModule const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModule const & ) const = default; #else bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_shaderModule == rhs.m_shaderModule; } - bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_shaderModule != rhs.m_shaderModule; } - bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_shaderModule < rhs.m_shaderModule; } @@ -17142,20 +20191,18 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eValidationCacheEXT; public: - VULKAN_HPP_CONSTEXPR ValidationCacheEXT() VULKAN_HPP_NOEXCEPT - : m_validationCacheEXT(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR ValidationCacheEXT() VULKAN_HPP_NOEXCEPT : m_validationCacheEXT( VK_NULL_HANDLE ) {} VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_validationCacheEXT(VK_NULL_HANDLE) + : m_validationCacheEXT( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT : m_validationCacheEXT( validationCacheEXT ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ValidationCacheEXT & operator=( VkValidationCacheEXT validationCacheEXT ) 
VULKAN_HPP_NOEXCEPT { m_validationCacheEXT = validationCacheEXT; return *this; @@ -17168,20 +20215,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ValidationCacheEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationCacheEXT const & ) const = default; #else bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_validationCacheEXT == rhs.m_validationCacheEXT; } - bool operator!=(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_validationCacheEXT != rhs.m_validationCacheEXT; } - bool operator<(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_validationCacheEXT < rhs.m_validationCacheEXT; } @@ -17205,7 +20252,8 @@ namespace VULKAN_HPP_NAMESPACE private: VkValidationCacheEXT m_validationCacheEXT; }; - static_assert( sizeof( ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" ); + static_assert( sizeof( ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), + "handle and wrapper have different size!" ); template <> struct cpp_type @@ -17221,20 +20269,14 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueue; public: - VULKAN_HPP_CONSTEXPR Queue() VULKAN_HPP_NOEXCEPT - : m_queue(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Queue() VULKAN_HPP_NOEXCEPT : m_queue( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_queue(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_queue( VK_NULL_HANDLE ) {} - VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT - : m_queue( queue ) - {} + VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT : m_queue( queue ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Queue & operator=(VkQueue queue) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Queue & operator=( VkQueue queue ) VULKAN_HPP_NOEXCEPT { m_queue = queue; return *this; @@ -17247,86 +20289,111 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Queue const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Queue const & ) const = default; #else bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_queue == rhs.m_queue; } - bool operator!=(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_queue != rhs.m_queue; } - bool operator<(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_queue < rhs.m_queue; } #endif - template - void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getCheckpointDataNV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector 
  [vulkan/vulkan.hpp, hunks not reproduced in full: the remaining Queue member function declarations
  (getCheckpointDataNV, beginDebugUtilsLabelEXT, bindSparse, endDebugUtilsLabelEXT, insertDebugUtilsLabelEXT,
  presentKHR, setPerformanceConfigurationINTEL, submit, waitIdle and the VkQueue conversion operator), followed by
  hunks @@ -17357,66 +20424,206 @@, @@ -17428,20 +20635,14 @@ and @@ -17454,1922 +20655,3306 @@ covering the
  UniqueHandleTraits specializations and the Unique* handle aliases (UniqueAccelerationStructureKHR through
  UniqueValidationCacheEXT, inside the VULKAN_HPP_NO_SMART_HANDLE guard) and the Device handle class
  (constructors, VkDevice assignment, comparison operators, and the member function declarations from
  acquireFullScreenExclusiveModeEXT through createRenderPass). Throughout these hunks, clang-format rewraps the
  previous single-line declarations to one parameter per line aligned after the opening parenthesis, adds spaces
  inside parentheses, separates '*' and '&' from both the type and the name, expands the one-line
  UniqueHandleTraits definitions into multi-line class bodies, and indents preprocessor directives nested inside
  other conditional blocks after the hash (e.g. "# ifdef VK_ENABLE_BETA_EXTENSIONS" / "# endif").]
& d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createRenderPass( const RenderPassCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createRenderPassUnique( const RenderPassCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createRenderPass2( const RenderPassCreateInfo2 & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Sampler * pSampler, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createSampler( const SamplerCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createSamplerUnique( const SamplerCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) 
const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result + createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createSemaphore( const SemaphoreCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createShaderModule( const ShaderModuleCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSharedSwapchainsKHR( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + createSharedSwapchainsKHR( ArrayProxy createInfos, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + createSharedSwapchainsKHR( ArrayProxy createInfos, + Optional allocator, + Allocator const & vectorAllocator, + Dispatch const & d ) const; + template + typename ResultValueType::type + createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType, Allocator>>::type + createSharedSwapchainsKHRUnique( ArrayProxy createInfos, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType, Allocator>>::type + createSharedSwapchainsKHRUnique( ArrayProxy createInfos, + Optional allocator, + Allocator const & vectorAllocator, + Dispatch const & d ) const; + template + typename ResultValueType>::type + createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createBuffer( const BufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createBufferView( const BufferViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) 
const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + typename ResultValueType::type + debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDeferredOperationKHR( Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDeferredOperationKHRUnique( Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# else + template + Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + 
void + destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + void + destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createEvent( const EventCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createEventUnique( const EventCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createFence( const FenceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const 
&d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - 
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createImage( const ImageCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createImageUnique( const ImageCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createImageView( const ImageViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif 
/*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template - Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional 
allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createSampler( const SamplerCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + void destroyDeferredOperationKHR( 
VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + 
destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template 
- typename ResultValueType>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template - void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyBuffer( 
VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template - void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate 
descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ template
+ void destroy( VULKAN_HPP_NAMESPACE::Event event,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template
- void destroy( VULKAN_HPP_NAMESPACE::Event event, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ template
+ void destroy( VULKAN_HPP_NAMESPACE::Event event,
+ Optional allocator = nullptr,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template
- void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ template
+ void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template
- void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ template
+ void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
+ Optional allocator = nullptr,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template
- void destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ template
+ void destroy( VULKAN_HPP_NAMESPACE::Fence fence,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template
- void destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ template
+ void destroy( VULKAN_HPP_NAMESPACE::Fence fence,
+ Optional allocator = nullptr,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template
- void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ template
+ void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template
- void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ template
+ void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ Optional allocator = nullptr,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template
- void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Image image, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( 
VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void + destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void + destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const 
VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyPipelineLayout( 
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator = nullptr, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySamplerYcbcrConversion( 
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void + destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void + destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const 
VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result waitIdle( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #else - template - typename ResultValueType::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type waitIdle( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result 
displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayPowerInfoEXT & displayPowerInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result flushMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type flushMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + flushMappedMemoryRanges( ArrayProxy memoryRanges, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + ArrayProxy commandBuffers, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + ArrayProxy commandBuffers, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result freeDescriptorSets( 
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy descriptorSets, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy descriptorSets, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + 
Optional allocator = nullptr,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_ENABLE_BETA_EXTENSIONS
- template
- DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template
- DeviceAddress getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ template
+ DeviceAddress getAccelerationStructureAddressKHR(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template
+ DeviceAddress getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const
+ VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- template
- Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ template
+ Result
+ getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template
- typename ResultValueType::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ template
+ typename ResultValueType::type
+ getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ ArrayProxy data,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_ENABLE_BETA_EXTENSIONS
- template
- void getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template
- StructureChain getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ template
+ void getAccelerationStructureMemoryRequirementsKHR(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+
template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 getAccelerationStructureMemoryRequirementsKHR( + const AccelerationStructureMemoryRequirementsInfoKHR & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getAccelerationStructureMemoryRequirementsKHR( + const AccelerationStructureMemoryRequirementsInfoKHR & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( + const AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getAccelerationStructureMemoryRequirementsNV( + const AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR - template - Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template - typename ResultValueType>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + template + Result getAndroidHardwareBufferPropertiesANDROID( + const struct AHardwareBuffer * buffer, + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType>::type + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & 
buffer, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template - DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - DeviceAddress getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + DeviceAddress getBufferAddress( const BufferDeviceAddressInfo & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfo & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - DeviceAddress getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + DeviceAddress getBufferAddressKHR( const BufferDeviceAddressInfo & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::MemoryRequirements + getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getBufferMemoryRequirements2( const 
VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain + getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void + getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain + getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + uint64_t + getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + uint64_t + getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif 
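The StructureChain overloads of getBufferMemoryRequirements2 declared above allow extension structures to be queried in the same call. A sketch, assuming `device` and `buffer` as before and that Vulkan 1.1 or VK_KHR_dedicated_allocation is available:

  auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
    vk::BufferMemoryRequirementsInfo2( buffer ) );
  vk::MemoryRequirements2 const &         requirements = chain.get<vk::MemoryRequirements2>();
  vk::MemoryDedicatedRequirements const & dedicated    = chain.get<vk::MemoryDedicatedRequirements>();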
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + uint64_t + getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + uint64_t + getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getCalibratedTimestampsEXT( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getCalibratedTimestampsEXT( ArrayProxy timestampInfos, ArrayProxy timestamps, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getCalibratedTimestampsEXT( ArrayProxy timestampInfos, + ArrayProxy timestamps, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result + getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# else + template + Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain + getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void + getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain + getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template + Result getAccelerationStructureCompatibilityKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR * version, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename 
ResultValueType::type + getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getGroupPresentCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getGroupPresentCapabilitiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getGroupPresentCapabilitiesKHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template + Result + getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result + getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::DeviceSize + getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + uint64_t + getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + uint64_t + getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, + 
Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + uint64_t + getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + uint64_t + getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + PFN_vkVoidFunction getProcAddr( const char * pName, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + PFN_vkVoidFunction getProcAddr( const std::string & name, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::Queue + getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::Queue + getQueue2( const DeviceQueueInfo2 & queueInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + 
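getQueue and getQueue2 above return the vk::Queue directly in enhanced mode. A sketch, assuming `device` and a `graphicsQueueFamilyIndex` determined during device creation:

  vk::Queue queue  = device.getQueue( graphicsQueueFamilyIndex, 0 );
  // Vulkan 1.1 variant taking a DeviceQueueInfo2, e.g. when queue create flags matter:
  vk::Queue queue2 = device.getQueue2( vk::DeviceQueueInfo2( {}, graphicsQueueFamilyIndex, 0 ) );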
template + Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #else - template - Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #else - template - Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template + Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const 
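getFenceStatus (like getEventStatus) returns a vk::Result in both modes, because eNotReady is a success code and is not turned into an exception in enhanced mode. A polling sketch, assuming `device` and a submitted `fence`:

  while ( device.getFenceStatus( fence ) == vk::Result::eNotReady )
  {
    ;  // busy-wait; a real application would rather call waitForFences with a timeout
  }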
VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( + const GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getGeneratedCommandsMemoryRequirementsNV( + const GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getImageDrmFormatModifierPropertiesEXT( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::MemoryRequirements + getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, 
Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain + getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain + getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getImageSparseMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, Allocator const & vectorAllocator, 
Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getImageSparseMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getImageSparseMemoryRequirements2( + const ImageSparseMemoryRequirementsInfo2 & info, Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getImageSparseMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getImageSparseMemoryRequirements2KHR( + const ImageSparseMemoryRequirementsInfo2 & info, Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* 
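The vector-returning getImageSparseMemoryRequirements2 overloads above hide the usual count-then-fill two-step. A sketch, assuming `device` and a `sparseImage` created with sparse-binding flags:

  std::vector<vk::SparseImageMemoryRequirements2> sparseRequirements =
    device.getImageSparseMemoryRequirements2( vk::ImageSparseMemoryRequirementsInfo2( sparseImage ) );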
pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::SubresourceLayout + getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const ImageSubresource & subresource, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR - template - Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + template + Result getMemoryAndroidHardwareBufferANDROID( + const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template - Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getMemoryHostPointerPropertiesEXT( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) 
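getMemoryFdKHR above exports a POSIX file descriptor for a device memory allocation. A sketch, assuming `device`, an `exportedMemory` allocation created with vk::ExportMemoryAllocateInfo, and the VK_KHR_external_memory_fd extension enabled:

  int fd = device.getMemoryFdKHR(
    vk::MemoryGetFdInfoKHR( exportedMemory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) );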
const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template + Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template + Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template + Result getMemoryWin32HandlePropertiesKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - Result getPastPresentationTimingGOOGLE( 
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result + getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + size_t * pDataSize, + void * pData, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type getPipelineCacheData( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename 
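The enhanced getPipelineCacheData overloads above return the cache blob as a std::vector<uint8_t>, ready to be persisted and fed into a later createPipelineCache call. A sketch, assuming `device` and a populated `pipelineCache`:

  std::vector<uint8_t> cacheData = device.getPipelineCacheData( pipelineCache );
  // write cacheData to disk, e.g. with an std::ofstream opened in binary mode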
ResultValueType>::type + getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + ArrayProxy data, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result 
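getQueryPoolResults above also has an ArrayProxy form that writes directly into caller-provided storage and still returns vk::Result, since eNotReady is a success code. A sketch, assuming `device` and a `queryPool` holding two occlusion queries:

  std::vector<uint64_t> samplesPassed( 2 );
  vk::Result result = device.getQueryPoolResults<uint64_t>( queryPool,
                                                            0,
                                                            2,
                                                            vk::ArrayProxy<uint64_t>( samplesPassed ),
                                                            sizeof( uint64_t ),
                                                            vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );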
getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template + Result getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy data, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy data, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result + getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy data, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 
VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result + getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::Extent2D + getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template + Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getShaderInfoAMD( 
VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type getSwapchainImagesKHR( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #else - template - Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template + Result importFenceWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - Result importSemaphoreFdKHR( const 
VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template + Result importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result + initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type invalidateMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type 
+ invalidateMappedMemoryRanges( ArrayProxy memoryRanges, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + void ** ppData, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = MemoryMapFlags(), + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + ArrayProxy srcCaches, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + ArrayProxy srcCaches, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, 
const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + typename ResultValueType::type + registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + typename ResultValueType::type + registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayEventInfoEXT & displayEventInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayEventInfoEXT & displayEventInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef 
VK_USE_PLATFORM_WIN32_KHR
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result
+      releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                         Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#  else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type
+      releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                         Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+                                                 Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const
+      VULKAN_HPP_NOEXCEPT;
 #else
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type
+      releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+                                            Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void releaseProfilingLockKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void releaseProfilingLockKHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+                             VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
+                             Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #else
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+                                                           VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
+                                                           Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+                                VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(),
+                                Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #else
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type
+      resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+                           VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(),
+                           Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+                       Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #else
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+                                                     Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetFences( uint32_t fenceCount,
+                        const VULKAN_HPP_NAMESPACE::Fence * pFences,
+                        Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences,
+                                                      Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                         uint32_t firstQuery,
+                         uint32_t queryCount,
+                         Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                            uint32_t firstQuery,
+                            uint32_t queryCount,
+                            Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,
+                                       Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type
+      setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo,
+                                  Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,
+                                      Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type
+      setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo,
+                                 Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setEvent( VULKAN_HPP_NAMESPACE::Event event,
+                     Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #else
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setEvent( VULKAN_HPP_NAMESPACE::Event event,
+                                                   Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( uint32_t swapchainCount,
+                            const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
+                            const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
+                            Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> metadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains,
+                            ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> metadata,
+                            Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
+                             VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
+                             Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
+                            Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type signalSemaphore( const
      SemaphoreSignalInfo & signalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type signalSemaphore( const SemaphoreSignalInfo & signalInfo,
+                                                          Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
+                               Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo,
+                                                             Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+                          VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(),
+                          Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+                             VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(),
+                             Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void uninitializePerformanceApiINTEL(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void
+      uninitializePerformanceApiINTEL( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+                      Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void
+      updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+                                       VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                       const void * pData,
+                                       Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;

-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; - template - void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void updateDescriptorSets( uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateDescriptorSets( ArrayProxy descriptorWrites, ArrayProxy descriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void updateDescriptorSets( ArrayProxy descriptorWrites, + ArrayProxy descriptorCopies, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result waitForFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result waitForFences( ArrayProxy fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + Result waitForFences( ArrayProxy fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + Result waitSemaphores( const SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + Result 
waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type writeAccelerationStructuresPropertiesKHR( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy data, size_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template + Result writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type writeAccelerationStructuresPropertiesKHR( + ArrayProxy accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + ArrayProxy data, + size_t stride, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT { @@ -19405,20 +23990,16 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayModeKHR; public: - VULKAN_HPP_CONSTEXPR DisplayModeKHR() VULKAN_HPP_NOEXCEPT - : m_displayModeKHR(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR DisplayModeKHR() VULKAN_HPP_NOEXCEPT : m_displayModeKHR( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_displayModeKHR(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_displayModeKHR( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT : m_displayModeKHR( displayModeKHR ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DisplayModeKHR & operator=( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT { m_displayModeKHR = displayModeKHR; return *this; @@ -19431,20 +24012,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayModeKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeKHR const & ) const = default; #else - bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_displayModeKHR == rhs.m_displayModeKHR; } - bool operator!=(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_displayModeKHR != rhs.m_displayModeKHR; } - bool operator<(DisplayModeKHR const & rhs 
) const VULKAN_HPP_NOEXCEPT + bool operator<( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_displayModeKHR < rhs.m_displayModeKHR; } @@ -19477,7 +24058,12 @@ namespace VULKAN_HPP_NAMESPACE }; #ifndef VULKAN_HPP_NO_SMART_HANDLE - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; using UniqueDevice = UniqueHandle; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ @@ -19489,20 +24075,16 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePhysicalDevice; public: - VULKAN_HPP_CONSTEXPR PhysicalDevice() VULKAN_HPP_NOEXCEPT - : m_physicalDevice(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR PhysicalDevice() VULKAN_HPP_NOEXCEPT : m_physicalDevice( VK_NULL_HANDLE ) {} - VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_physicalDevice(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_physicalDevice( VK_NULL_HANDLE ) {} VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT : m_physicalDevice( physicalDevice ) {} -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PhysicalDevice & operator=( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT { m_physicalDevice = physicalDevice; return *this; @@ -19515,566 +24097,981 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDevice const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice const & ) const = default; #else bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_physicalDevice == rhs.m_physicalDevice; } - bool operator!=(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_physicalDevice != rhs.m_physicalDevice; } - bool operator<(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_physicalDevice < rhs.m_physicalDevice; } #endif #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - template - Result acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + template + Result acquireXlibDisplayEXT( Display * dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - template - Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* 
pDevice, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Device * pDevice, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDevice( const DeviceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + typename ResultValueType::type + createDevice( const DeviceCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createDeviceUnique( const DeviceCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayModeCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result enumerateDeviceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumerateDeviceExtensionProperties( Optional layerName = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type enumerateDeviceExtensionProperties( + Optional layerName, Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, 
VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result + enumerateDeviceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumerateDeviceLayerProperties( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumerateDeviceLayerProperties( Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateDeviceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + ArrayProxy counters, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + ArrayProxy counters, + Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy counters, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy counters, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type getDisplayModeProperties2KHR( + VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type getDisplayModePropertiesKHR( + VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result + getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 
Allocator const& vectorAllocator, Dispatch const &d ) const; + template + typename ResultValueType::type + getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result + getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + uint32_t * pDisplayCount, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const + VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( + uint32_t planeIndex, Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result + getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename 
ResultValueType>::type + getCalibrateableTimeDomainsEXT( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getCalibrateableTimeDomainsEXT( Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result + getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getCooperativeMatrixPropertiesNV( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getCooperativeMatrixPropertiesNV( Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result + getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getCooperativeMatrixPropertiesNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getDisplayPlaneProperties2KHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getDisplayPlaneProperties2KHR( Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPlaneProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getDisplayPlanePropertiesKHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getDisplayPlanePropertiesKHR( Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPlanePropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getDisplayProperties2KHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getDisplayProperties2KHR( Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getDisplayPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getDisplayPropertiesKHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getDisplayPropertiesKHR( Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const 
VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void
+      getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
+                                   VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
+                                   Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties
+      getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
+                                   Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalBufferPropertiesKHR(
+      const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
+      VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
+      Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties
+      getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
+                                      Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
+                                     VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
+                                     Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties
+      getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
+                                  Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void
+
getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::ExternalFenceProperties + getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + typename ResultValueType::type + getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures + getFeatures( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + getFeatures2( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain + getFeatures2( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + getFeatures2KHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain + getFeatures2KHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::FormatProperties + getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const 
VULKAN_HPP_NOEXCEPT; - template - StructureChain getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + typename ResultValueType::type + getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType>::type + getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result 
getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template - typename ResultValueType>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType>::type + getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template - typename ResultValueType>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + getMemoryProperties( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain + getMemoryProperties2( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch 
const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2KHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain + getMemoryProperties2KHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pRectCount, + VULKAN_HPP_NAMESPACE::Rect2D * pRects, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type getPresentRectanglesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getProperties( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,
+                        Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties
+      getProperties( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
+                         Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
+      getProperties2( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...>
+      getProperties2( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
+                            Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
+      getProperties2KHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...>
+      getProperties2KHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyPerformanceQueryPassesKHR(
+      const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
+      uint32_t * pNumPasses,
+      Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getQueueFamilyPerformanceQueryPassesKHR(
+ const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint32_t getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getQueueFamilyProperties( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getQueueFamilyProperties( Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getQueueFamilyProperties2( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getQueueFamilyProperties2( Allocator const & vectorAllocator, + Dispatch const & d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getQueueFamilyProperties2( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getQueueFamilyProperties2( Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2(Dispatch 
const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getQueueFamilyProperties2KHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getQueueFamilyProperties2KHR( Allocator const & vectorAllocator, + Dispatch const & d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getQueueFamilyProperties2KHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getQueueFamilyProperties2KHR( Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void + getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void getSparseImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector + getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSupportedFramebufferMixedSamplesCombinationsNV( + uint32_t * pCombinationCount, + VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getSupportedFramebufferMixedSamplesCombinationsNV( Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const; + template + typename ResultValueType::type + getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType>::type + getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template - typename ResultValueType>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type getSurfaceFormats2KHR( + const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) 
const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type getSurfaceFormatsKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template + Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type getSurfacePresentModes2EXT( + const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const & 
vectorAllocator, Dispatch const & d ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type getSurfacePresentModesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + typename ResultValueType::type + getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result getToolPropertiesEXT( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT * pToolProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getToolPropertiesEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename 
ResultValueType>::type + getToolPropertiesEXT( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + getToolPropertiesEXT( Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WAYLAND_KHR - template - Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + template + Bool32 + getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display * display, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 + getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display & display, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Bool32 + getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_XCB_KHR - template - Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XCB_KHR*/ + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ #ifdef VK_USE_PLATFORM_XLIB_KHR - template - Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + template + Bool32 + getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, 
+                                   Display * dpy,
+                                   VisualID visualID,
+                                   Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template
+    Bool32
+      getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
+                                     Display & dpy,
+                                     VisualID visualID,
+                                     Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
 #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-    template
-    Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    typename ResultValueType::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+    template
+    Result getRandROutputDisplayEXT( Display * dpy,
+                                     RROutput rrOutput,
+                                     VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
+                                     Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template
+    typename ResultValueType::type getRandROutputDisplayEXT(
+      Display & dpy, RROutput rrOutput, Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template
+    Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+                              Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
 #else
-    template
-    typename ResultValueType::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template
+    typename ResultValueType::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+                                                      Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
     VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
@@ -20105,11 +25102,26 @@ namespace VULKAN_HPP_NAMESPACE
 #ifndef VULKAN_HPP_NO_SMART_HANDLE
   class Instance;
-  template class UniqueHandleTraits { public: using deleter = ObjectDestroy; };
+  template
+  class UniqueHandleTraits
+  {
+  public:
+    using deleter = ObjectDestroy;
+  };
   using UniqueDebugReportCallbackEXT = UniqueHandle;
-  template class UniqueHandleTraits { public: using deleter = ObjectDestroy; };
+  template
+  class UniqueHandleTraits
+  {
+  public:
+    using deleter = ObjectDestroy;
+  };
   using UniqueDebugUtilsMessengerEXT = UniqueHandle;
-  template class UniqueHandleTraits { public: using deleter = ObjectDestroy; };
+  template
+  class UniqueHandleTraits
+  {
+  public:
+    using deleter = ObjectDestroy;
+  };
   using UniqueSurfaceKHR = UniqueHandle;
 #endif /*VULKAN_HPP_NO_SMART_HANDLE*/
@@ -20121,20 +25133,14 @@ namespace VULKAN_HPP_NAMESPACE
     static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eInstance;
   public:
-    VULKAN_HPP_CONSTEXPR Instance() VULKAN_HPP_NOEXCEPT
-      : m_instance(VK_NULL_HANDLE)
-    {}
+    VULKAN_HPP_CONSTEXPR Instance() VULKAN_HPP_NOEXCEPT : m_instance( VK_NULL_HANDLE ) {}
-    VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_instance(VK_NULL_HANDLE)
-    {}
+    VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT : m_instance( VK_NULL_HANDLE ) {}
-    VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT
-      : m_instance( instance )
-    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT : m_instance( instance ) {}
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Instance & operator=(VkInstance instance) VULKAN_HPP_NOEXCEPT
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    Instance & operator=( VkInstance instance ) VULKAN_HPP_NOEXCEPT
     {
       m_instance = instance;
       return *this;
@@ -20147,307 +25153,512 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Instance const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Instance const & ) const = default;
 #else
-    bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return m_instance == rhs.m_instance;
     }
-    bool operator!=(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
       return m_instance != rhs.m_instance;
    }
-    bool operator<(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator<( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
       return m_instance < rhs.m_instance;
    }
 #endif
 #ifdef VK_USE_PLATFORM_ANDROID_KHR
-    template
-    Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    typename ResultValueType::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template
-    typename ResultValueType>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    template
+    Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
+                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                    VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+                                    Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template
+    typename ResultValueType::type
+      createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo,
+                               Optional allocator = nullptr,
+                               Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template
+    typename ResultValueType>::type
+      createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo,
+                                     Optional allocator = nullptr,
+                                     Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-    template
-    Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT*
pCallback, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + typename ResultValueType::type + createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + typename ResultValueType::type + createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + typename ResultValueType::type + createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + typename ResultValueType::type + createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_IOS_MVK - template - Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_IOS_MVK*/ + template + Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ #ifdef VK_USE_PLATFORM_FUCHSIA - template - Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ + template + Result + createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif 
/*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ #ifdef VK_USE_PLATFORM_MACOS_MVK - template - Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + template + Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ #ifdef VK_USE_PLATFORM_METAL_EXT - template - Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_METAL_EXT*/ + template + Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename 
ResultValueType>::type + createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ #ifdef VK_USE_PLATFORM_GGP - template - Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_GGP*/ + template + Result + createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_GGP*/ #ifdef VK_USE_PLATFORM_VI_NN - template - Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_VI_NN*/ + template + Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 
+ template + typename ResultValueType::type + createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_VI_NN*/ #ifdef VK_USE_PLATFORM_WAYLAND_KHR - template - Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + template + Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template + Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_XCB_KHR - template - Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XCB_KHR*/ + template + Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ #ifdef VK_USE_PLATFORM_XLIB_KHR - template - Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + template + Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - template - void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch 
const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, 
Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result enumeratePhysicalDeviceGroups( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumeratePhysicalDeviceGroups(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumeratePhysicalDeviceGroups( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumeratePhysicalDeviceGroups( Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result enumeratePhysicalDeviceGroupsKHR( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumeratePhysicalDeviceGroupsKHR( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumeratePhysicalDeviceGroupsKHR( Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + Result enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, + VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumeratePhysicalDevices(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumeratePhysicalDevices( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumeratePhysicalDevices( Allocator const & vectorAllocator, Dispatch const & d ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + PFN_vkVoidFunction getProcAddr( const char * pName, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + PFN_vkVoidFunction getProcAddr( const std::string & name, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const DebugUtilsMessengerCallbackDataEXT & callbackData, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT @@ -20477,44 +25688,74 @@ namespace VULKAN_HPP_NAMESPACE }; #ifndef VULKAN_HPP_NO_SMART_HANDLE - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; using UniqueInstance = UniqueHandle; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ - template - Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; + template + Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Instance * pInstance, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + template + typename ResultValueType::type + createInstance( const InstanceCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ); +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type + createInstanceUnique( const InstanceCreateInfo & createInfo, + Optional allocator = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ); +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result enumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName = nullptr, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ); + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type enumerateInstanceExtensionProperties( + Optional layerName, Allocator const & vectorAllocator, Dispatch const & d ); #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result enumerateInstanceExtensionProperties( const 
char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; + template + Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ); + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumerateInstanceLayerProperties( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ); + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename ResultValueType>::type + enumerateInstanceLayerProperties( Allocator const & vectorAllocator, Dispatch const & d ); #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; + template + Result enumerateInstanceVersion( uint32_t * pApiVersion, + Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type enumerateInstanceVersion(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); + template + typename ResultValueType::type + enumerateInstanceVersion( Dispatch const & d = VULKAN_HPP_DEFAULT_DISPATCHER ); #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ struct AabbPositionsKHR @@ -20538,9 +25779,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AabbPositionsKHR& operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -20580,30 +25821,26 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkAabbPositionsKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AabbPositionsKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AabbPositionsKHR 
const & ) const = default; #else - bool operator==( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( minX == rhs.minX ) - && ( minY == rhs.minY ) - && ( minZ == rhs.minZ ) - && ( maxX == rhs.maxX ) - && ( maxY == rhs.maxY ) - && ( maxZ == rhs.maxZ ); + return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) && + ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ ); } - bool operator!=( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -20617,72 +25854,74 @@ namespace VULKAN_HPP_NAMESPACE float maxY = {}; float maxZ = {}; }; - static_assert( sizeof( AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #ifdef VK_ENABLE_BETA_EXTENSIONS union DeviceOrHostAddressConstKHR { - DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const& rhs ) VULKAN_HPP_NOEXCEPT + DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) ); } DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} - DeviceOrHostAddressConstKHR( const void* hostAddress_ ) - : hostAddress( hostAddress_ ) - {} + DeviceOrHostAddressConstKHR( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {} - DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + DeviceOrHostAddressConstKHR & + setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } - DeviceOrHostAddressConstKHR & setHostAddress( const void* hostAddress_ ) VULKAN_HPP_NOEXCEPT + DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT { hostAddress = hostAddress_; return *this; } - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR & + operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) ); return *this; } - operator VkDeviceOrHostAddressConstKHR const&() const + operator VkDeviceOrHostAddressConstKHR const &() const { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } operator VkDeviceOrHostAddressConstKHR &() { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; - const void* hostAddress; -#else - VkDeviceAddress deviceAddress; - const void* hostAddress; -#endif 
/*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + const void * hostAddress; +# else + VkDeviceAddress deviceAddress; + const void * hostAddress; +# endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureGeometryTrianglesDataKHR { - AccelerationStructureGeometryTrianglesDataKHR( VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, - VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {} ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR( + VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {} ) VULKAN_HPP_NOEXCEPT : vertexFormat( vertexFormat_ ) , vertexData( vertexData_ ) , vertexStride( vertexStride_ ) @@ -20691,87 +25930,102 @@ namespace VULKAN_HPP_NAMESPACE , transformData( transformData_ ) {} - AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & + operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureGeometryTrianglesDataKHR ) - offsetof( AccelerationStructureGeometryTrianglesDataKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AccelerationStructureGeometryTrianglesDataKHR ) - + offsetof( AccelerationStructureGeometryTrianglesDataKHR, pNext ) ); return *this; } - AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - AccelerationStructureGeometryTrianglesDataKHR& operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & + operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & + setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return *this; } - AccelerationStructureGeometryTrianglesDataKHR & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + 
AccelerationStructureGeometryTrianglesDataKHR & + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return *this; } - AccelerationStructureGeometryTrianglesDataKHR & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & + setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return *this; } - AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & + setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } - AccelerationStructureGeometryTrianglesDataKHR & setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & + setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT { indexData = indexData_; return *this; } - AccelerationStructureGeometryTrianglesDataKHR & setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & + setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT { transformData = transformData_; return *this; } - operator VkAccelerationStructureGeometryTrianglesDataKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; - VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; - VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {}; }; - static_assert( sizeof( AccelerationStructureGeometryTrianglesDataKHR ) == sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureGeometryTrianglesDataKHR ) == + sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS @@ -20783,134 +26037,159 @@ namespace VULKAN_HPP_NAMESPACE , stride( stride_ ) {} - AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryAabbsDataKHR & + operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureGeometryAabbsDataKHR ) - offsetof( AccelerationStructureGeometryAabbsDataKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AccelerationStructureGeometryAabbsDataKHR ) - + offsetof( AccelerationStructureGeometryAabbsDataKHR, pNext ) ); return *this; } - AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - AccelerationStructureGeometryAabbsDataKHR& operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryAabbsDataKHR & + operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureGeometryAabbsDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureGeometryAabbsDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryAabbsDataKHR & + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT { data = data_; return *this; } - AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryAabbsDataKHR & + setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } - operator VkAccelerationStructureGeometryAabbsDataKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; - VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; }; - static_assert( sizeof( AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureGeometryAabbsDataKHR ) == + sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureGeometryInstancesDataKHR { - AccelerationStructureGeometryInstancesDataKHR( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {} ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryInstancesDataKHR( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {} ) + VULKAN_HPP_NOEXCEPT : arrayOfPointers( arrayOfPointers_ ) , data( data_ ) {} - AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryInstancesDataKHR & + operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureGeometryInstancesDataKHR ) - offsetof( AccelerationStructureGeometryInstancesDataKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AccelerationStructureGeometryInstancesDataKHR ) - + offsetof( AccelerationStructureGeometryInstancesDataKHR, pNext ) ); return *this; } - AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - AccelerationStructureGeometryInstancesDataKHR& operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryInstancesDataKHR & + operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureGeometryInstancesDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryInstancesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryInstancesDataKHR & + setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT { arrayOfPointers = arrayOfPointers_; return *this; } - AccelerationStructureGeometryInstancesDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryInstancesDataKHR & + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT { data = data_; return *this; } - operator VkAccelerationStructureGeometryInstancesDataKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {}; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + const void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; }; - static_assert( sizeof( AccelerationStructureGeometryInstancesDataKHR ) == sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureGeometryInstancesDataKHR ) == + sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS union AccelerationStructureGeometryDataKHR { - AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const& rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); } - AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) + AccelerationStructureGeometryDataKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) : triangles( triangles_ ) {} @@ -20918,62 +26197,68 @@ namespace VULKAN_HPP_NAMESPACE : aabbs( aabbs_ ) {} - AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ ) + AccelerationStructureGeometryDataKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ ) : instances( instances_ ) {} - AccelerationStructureGeometryDataKHR & setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryDataKHR & setTriangles( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT { - memcpy( &triangles, &triangles_, sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR)); + memcpy( &triangles, &triangles_, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR ) ); return *this; } - AccelerationStructureGeometryDataKHR & setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryDataKHR & + setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT { - memcpy( &aabbs, &aabbs_, sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR)); + memcpy( &aabbs, &aabbs_, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR ) ); return *this; } - AccelerationStructureGeometryDataKHR & setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryDataKHR & setInstances( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT { - memcpy( &instances, &instances_, sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR)); + memcpy( &instances, &instances_, sizeof( 
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR ) ); return *this; } - VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR & + operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); return *this; } - operator VkAccelerationStructureGeometryDataKHR const&() const + operator VkAccelerationStructureGeometryDataKHR const &() const { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryDataKHR &() { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles; - VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs; VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances; -#else +# else VkAccelerationStructureGeometryTrianglesDataKHR triangles; - VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; VkAccelerationStructureGeometryInstancesDataKHR instances; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ +# endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureGeometryKHR { - AccelerationStructureGeometryKHR( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, - VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, - VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryKHR( + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT : geometryType( geometryType_ ) , geometry( geometry_ ) , flags( flags_ ) @@ -20981,7 +26266,9 @@ namespace VULKAN_HPP_NAMESPACE AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureGeometryKHR ) - offsetof( AccelerationStructureGeometryKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AccelerationStructureGeometryKHR ) - offsetof( AccelerationStructureGeometryKHR, pNext ) ); return *this; } @@ -20990,25 +26277,27 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AccelerationStructureGeometryKHR& operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureGeometryKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - 
AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryKHR & + setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT { geometryType = geometryType_; return *this; } - AccelerationStructureGeometryKHR & setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryKHR & + setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT { geometry = geometry_; return *this; @@ -21020,42 +26309,41 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkAccelerationStructureGeometryKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {}; - VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; }; - static_assert( sizeof( AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS union DeviceOrHostAddressKHR { - DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const& rhs ) VULKAN_HPP_NOEXCEPT + DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); } - DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) - : deviceAddress( deviceAddress_ ) + DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} - DeviceOrHostAddressKHR( void* hostAddress_ ) - : hostAddress( hostAddress_ ) - {} + DeviceOrHostAddressKHR( void * hostAddress_ ) : hostAddress( hostAddress_ ) {} DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT { @@ -21063,50 +26351,53 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceOrHostAddressKHR & setHostAddress( void* hostAddress_ ) VULKAN_HPP_NOEXCEPT + DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) VULKAN_HPP_NOEXCEPT { hostAddress = hostAddress_; return *this; } - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR & + operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); return *this; } - operator VkDeviceOrHostAddressKHR const&() const + operator VkDeviceOrHostAddressKHR const &() const { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } operator VkDeviceOrHostAddressKHR &() { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; - void* hostAddress; -#else - VkDeviceAddress deviceAddress; - void* hostAddress; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + void * hostAddress; +# else + VkDeviceAddress deviceAddress; + void * hostAddress; +# endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureBuildGeometryInfoKHR { - AccelerationStructureBuildGeometryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, - VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 update_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ = {}, - uint32_t geometryCount_ = {}, - const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const* ppGeometries_ = {}, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + 
VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 update_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ = {}, + uint32_t geometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} ) VULKAN_HPP_NOEXCEPT : type( type_ ) , flags( flags_ ) , update( update_ ) @@ -21118,36 +26409,44 @@ namespace VULKAN_HPP_NAMESPACE , scratchData( scratchData_ ) {} - AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & + operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureBuildGeometryInfoKHR ) - offsetof( AccelerationStructureBuildGeometryInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AccelerationStructureBuildGeometryInfoKHR ) - + offsetof( AccelerationStructureBuildGeometryInfoKHR, pNext ) ); return *this; } - AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - AccelerationStructureBuildGeometryInfoKHR& operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & + operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureBuildGeometryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -21159,19 +26458,22 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT { srcAccelerationStructure = srcAccelerationStructure_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR 
dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT { dstAccelerationStructure = dstAccelerationStructure_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setGeometryArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & + setGeometryArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ ) VULKAN_HPP_NOEXCEPT { geometryArrayOfPointers = geometryArrayOfPointers_; return *this; @@ -21183,51 +26485,57 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const* ppGeometries_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT { ppGeometries = ppGeometries_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & + setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT { scratchData = scratchData_; return *this; } - operator VkAccelerationStructureBuildGeometryInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; - VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 update = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {}; - VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers = {}; - uint32_t geometryCount = {}; - const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const* ppGeometries = {}; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 update = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; }; - static_assert( sizeof( AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + static_assert( sizeof( AccelerationStructureBuildGeometryInfoKHR ) == + sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureBuildOffsetInfoKHR { - VULKAN_HPP_CONSTEXPR AccelerationStructureBuildOffsetInfoKHR( uint32_t primitiveCount_ = {}, + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildOffsetInfoKHR( uint32_t primitiveCount_ = {}, uint32_t primitiveOffset_ = {}, - uint32_t firstVertex_ = {}, + uint32_t firstVertex_ = {}, uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT : primitiveCount( primitiveCount_ ) , primitiveOffset( primitiveOffset_ ) @@ -21240,9 +26548,10 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AccelerationStructureBuildOffsetInfoKHR& operator=( VkAccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildOffsetInfoKHR & + operator=( VkAccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -21270,52 +26579,54 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkAccelerationStructureBuildOffsetInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureBuildOffsetInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureBuildOffsetInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureBuildOffsetInfoKHR const& ) const = default; -#else - bool operator==( AccelerationStructureBuildOffsetInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureBuildOffsetInfoKHR const & ) const = default; +# else + bool operator==( AccelerationStructureBuildOffsetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( primitiveCount == rhs.primitiveCount ) - && ( primitiveOffset == rhs.primitiveOffset ) - && ( firstVertex == rhs.firstVertex ) - && ( transformOffset == rhs.transformOffset ); + return ( primitiveCount == rhs.primitiveCount ) && ( primitiveOffset == rhs.primitiveOffset ) && + ( firstVertex == rhs.firstVertex ) && ( transformOffset == rhs.transformOffset ); } - bool operator!=( AccelerationStructureBuildOffsetInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AccelerationStructureBuildOffsetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - uint32_t primitiveCount = {}; + uint32_t primitiveCount = {}; uint32_t primitiveOffset = {}; - uint32_t firstVertex = {}; + uint32_t firstVertex = {}; uint32_t transformOffset = {}; }; - static_assert( sizeof( AccelerationStructureBuildOffsetInfoKHR ) == sizeof( VkAccelerationStructureBuildOffsetInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureBuildOffsetInfoKHR ) == + sizeof( VkAccelerationStructureBuildOffsetInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureCreateGeometryTypeInfoKHR { - VULKAN_HPP_CONSTEXPR AccelerationStructureCreateGeometryTypeInfoKHR( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, - uint32_t maxPrimitiveCount_ = {}, - VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, - uint32_t maxVertexCount_ = {}, - VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateGeometryTypeInfoKHR( + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + uint32_t maxPrimitiveCount_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + uint32_t maxVertexCount_ = {}, + VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ = {} ) VULKAN_HPP_NOEXCEPT : geometryType( geometryType_ ) , maxPrimitiveCount( maxPrimitiveCount_ ) , indexType( indexType_ ) @@ -21324,42 +26635,51 @@ namespace VULKAN_HPP_NAMESPACE , allowsTransforms( allowsTransforms_ ) {} - AccelerationStructureCreateGeometryTypeInfoKHR & operator=( AccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateGeometryTypeInfoKHR & + operator=( AccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) - offsetof( AccelerationStructureCreateGeometryTypeInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) - + offsetof( AccelerationStructureCreateGeometryTypeInfoKHR, pNext ) ); return *this; } - AccelerationStructureCreateGeometryTypeInfoKHR( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateGeometryTypeInfoKHR( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - AccelerationStructureCreateGeometryTypeInfoKHR& operator=( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateGeometryTypeInfoKHR & + operator=( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureCreateGeometryTypeInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateGeometryTypeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureCreateGeometryTypeInfoKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateGeometryTypeInfoKHR & + setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT { geometryType = geometryType_; return *this; } - AccelerationStructureCreateGeometryTypeInfoKHR & setMaxPrimitiveCount( uint32_t maxPrimitiveCount_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateGeometryTypeInfoKHR & + setMaxPrimitiveCount( uint32_t maxPrimitiveCount_ ) VULKAN_HPP_NOEXCEPT { maxPrimitiveCount = maxPrimitiveCount_; return *this; } - AccelerationStructureCreateGeometryTypeInfoKHR & 
setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateGeometryTypeInfoKHR & + setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; @@ -21371,72 +26691,75 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AccelerationStructureCreateGeometryTypeInfoKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateGeometryTypeInfoKHR & + setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return *this; } - AccelerationStructureCreateGeometryTypeInfoKHR & setAllowsTransforms( VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateGeometryTypeInfoKHR & + setAllowsTransforms( VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ ) VULKAN_HPP_NOEXCEPT { allowsTransforms = allowsTransforms_; return *this; } - operator VkAccelerationStructureCreateGeometryTypeInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureCreateGeometryTypeInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureCreateGeometryTypeInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureCreateGeometryTypeInfoKHR const& ) const = default; -#else - bool operator==( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCreateGeometryTypeInfoKHR const & ) const = default; +# else + bool operator==( AccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( geometryType == rhs.geometryType ) - && ( maxPrimitiveCount == rhs.maxPrimitiveCount ) - && ( indexType == rhs.indexType ) - && ( maxVertexCount == rhs.maxVertexCount ) - && ( vertexFormat == rhs.vertexFormat ) - && ( allowsTransforms == rhs.allowsTransforms ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) && + ( maxPrimitiveCount == rhs.maxPrimitiveCount ) && ( indexType == rhs.indexType ) && + ( maxVertexCount == rhs.maxVertexCount ) && ( vertexFormat == rhs.vertexFormat ) && + ( allowsTransforms == rhs.allowsTransforms ); } - bool operator!=( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; - uint32_t maxPrimitiveCount = {}; - VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; - uint32_t maxVertexCount = {}; - VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + uint32_t maxPrimitiveCount = {}; + 
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + uint32_t maxVertexCount = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms = {}; }; - static_assert( sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) == sizeof( VkAccelerationStructureCreateGeometryTypeInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) == + sizeof( VkAccelerationStructureCreateGeometryTypeInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureCreateInfoKHR { - VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, - VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, - uint32_t maxGeometryCount_ = {}, - const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos_ = {}, - VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + uint32_t maxGeometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR * pGeometryInfos_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT : compactedSize( compactedSize_ ) , type( type_ ) , flags( flags_ ) @@ -21447,7 +26770,9 @@ namespace VULKAN_HPP_NAMESPACE AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureCreateInfoKHR ) - offsetof( AccelerationStructureCreateInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AccelerationStructureCreateInfoKHR ) - offsetof( AccelerationStructureCreateInfoKHR, pNext ) ); return *this; } @@ -21456,31 +26781,35 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AccelerationStructureCreateInfoKHR& operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoKHR & + operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureCreateInfoKHR & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoKHR & + setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT { compactedSize = compactedSize_; return *this; } - AccelerationStructureCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) 
VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoKHR & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } - AccelerationStructureCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -21492,76 +26821,77 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AccelerationStructureCreateInfoKHR & setPGeometryInfos( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoKHR & setPGeometryInfos( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR * pGeometryInfos_ ) VULKAN_HPP_NOEXCEPT { pGeometryInfos = pGeometryInfos_; return *this; } - AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoKHR & + setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } - operator VkAccelerationStructureCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureCreateInfoKHR const& ) const = default; -#else - bool operator==( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default; +# else + bool operator==( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( compactedSize == rhs.compactedSize ) - && ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( maxGeometryCount == rhs.maxGeometryCount ) - && ( pGeometryInfos == rhs.pGeometryInfos ) - && ( deviceAddress == rhs.deviceAddress ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) && + ( type == rhs.type ) && ( flags == rhs.flags ) && ( maxGeometryCount == rhs.maxGeometryCount ) && + ( pGeometryInfos == rhs.pGeometryInfos ) && ( deviceAddress == rhs.deviceAddress ); } - bool operator!=( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; - VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; - uint32_t maxGeometryCount = {}; - const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos = {}; - VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + uint32_t maxGeometryCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR * pGeometryInfos = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; }; - static_assert( sizeof( AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct GeometryTrianglesNV { - VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, - uint32_t vertexCount_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, - VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, - uint32_t indexCount_ = {}, - VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, - VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, + uint32_t vertexCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, + uint32_t indexCount_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT : vertexData( vertexData_ ) , vertexOffset( vertexOffset_ ) , vertexCount( vertexCount_ ) @@ -21586,13 +26916,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - GeometryTrianglesNV& operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT + GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - GeometryTrianglesNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + GeometryTrianglesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -21664,66 +26994,60 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkGeometryTrianglesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( GeometryTrianglesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryTrianglesNV const & ) const = default; #else - bool operator==( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( vertexData == rhs.vertexData ) - && ( vertexOffset == rhs.vertexOffset ) - && ( vertexCount == rhs.vertexCount ) - && ( vertexStride == rhs.vertexStride ) - && ( vertexFormat == rhs.vertexFormat ) - && ( indexData == rhs.indexData ) - && ( indexOffset == rhs.indexOffset ) - && ( indexCount == rhs.indexCount ) - && ( indexType == rhs.indexType ) - && ( transformData == rhs.transformData ) - && ( transformOffset == rhs.transformOffset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexData == rhs.vertexData ) && + ( vertexOffset == rhs.vertexOffset ) && ( vertexCount == rhs.vertexCount ) && + ( vertexStride == rhs.vertexStride ) && ( vertexFormat == rhs.vertexFormat ) && + ( indexData == rhs.indexData ) && ( indexOffset == rhs.indexOffset ) && ( indexCount == rhs.indexCount ) && + ( indexType == rhs.indexType ) && ( transformData == rhs.transformData ) && + ( transformOffset == rhs.transformOffset ); } - bool operator!=( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer vertexData = {}; - VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {}; - uint32_t vertexCount = {}; - VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; - VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::Buffer indexData = {}; - VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {}; - uint32_t indexCount = {}; - VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; - VULKAN_HPP_NAMESPACE::Buffer transformData = {}; - VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {}; + uint32_t vertexCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Buffer indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {}; + uint32_t indexCount = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::Buffer transformData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {}; }; - static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" ); + static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct GeometryAABBNV { - VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, - uint32_t numAABBs_ = {}, - uint32_t stride_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, + uint32_t numAABBs_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT : aabbData( aabbData_ ) , numAABBs( numAABBs_ ) , stride( stride_ ) @@ -21741,13 +27065,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - GeometryAABBNV& operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT + GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - GeometryAABBNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + GeometryAABBNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -21777,42 +27101,38 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkGeometryAABBNV const&() const VULKAN_HPP_NOEXCEPT + operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( GeometryAABBNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryAABBNV const & ) const = default; #else - bool operator==( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( aabbData == rhs.aabbData ) - && ( numAABBs == rhs.numAABBs ) - && ( stride == rhs.stride ) - && ( offset == rhs.offset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aabbData == rhs.aabbData ) && + ( numAABBs == rhs.numAABBs ) && ( stride == rhs.stride ) && ( offset == rhs.offset ); } - bool operator!=( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer aabbData = {}; - uint32_t numAABBs = {}; - uint32_t stride = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer aabbData = {}; + uint32_t numAABBs = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; }; static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
@@ -21820,7 +27140,7 @@ namespace VULKAN_HPP_NAMESPACE
   struct GeometryDataNV
   {
     VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {},
-                                         VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT
+                                         VULKAN_HPP_NAMESPACE::GeometryAABBNV      aabbs_     = {} ) VULKAN_HPP_NOEXCEPT
       : triangles( triangles_ )
       , aabbs( aabbs_ )
     {}
@@ -21830,9 +27150,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    GeometryDataNV& operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>( &rhs );
       return *this;
     }

@@ -21848,26 +27168,25 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkGeometryDataNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkGeometryDataNV*>( this );
+      return *reinterpret_cast<const VkGeometryDataNV *>( this );
     }

     operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkGeometryDataNV*>( this );
+      return *reinterpret_cast<VkGeometryDataNV *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( GeometryDataNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( GeometryDataNV const & ) const = default;
 #else
-    bool operator==( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( triangles == rhs.triangles )
-          && ( aabbs == rhs.aabbs );
+      return ( triangles == rhs.triangles ) && ( aabbs == rhs.aabbs );
     }

-    bool operator!=( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -21875,16 +27194,17 @@ namespace VULKAN_HPP_NAMESPACE

   public:
     VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
-    VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {};
+    VULKAN_HPP_NAMESPACE::GeometryAABBNV      aabbs     = {};
   };
   static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<GeometryDataNV>::value, "struct wrapper is not a standard layout!"
); struct GeometryNV { - VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, - VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, - VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR GeometryNV( + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT : geometryType( geometryType_ ) , geometry( geometry_ ) , flags( flags_ ) @@ -21901,13 +27221,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - GeometryNV& operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT + GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - GeometryNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + GeometryNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -21931,51 +27251,49 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkGeometryNV const&() const VULKAN_HPP_NOEXCEPT + operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( GeometryNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryNV const & ) const = default; #else - bool operator==( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( geometryType == rhs.geometryType ) - && ( geometry == rhs.geometry ) - && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) && + ( geometry == rhs.geometry ) && ( flags == rhs.flags ); } - bool operator!=( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; - VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {}; - VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; }; static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct AccelerationStructureInfoNV { - VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, - VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, - uint32_t instanceCount_ = {}, - uint32_t geometryCount_ = {}, - const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, + uint32_t instanceCount_ = {}, + uint32_t geometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {} ) VULKAN_HPP_NOEXCEPT : type( type_ ) , flags( flags_ ) , instanceCount( instanceCount_ ) @@ -21985,7 +27303,8 @@ namespace VULKAN_HPP_NAMESPACE AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureInfoNV ) - offsetof( AccelerationStructureInfoNV, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( AccelerationStructureInfoNV ) - offsetof( AccelerationStructureInfoNV, pNext ) ); return *this; } @@ -21994,13 +27313,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AccelerationStructureInfoNV& operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -22012,7 +27331,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -22030,65 +27350,67 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureInfoNV & + setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT { pGeometries = pGeometries_; return *this; } - operator VkAccelerationStructureInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureInfoNV const & ) const = default; #else - bool operator==( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( instanceCount == rhs.instanceCount ) - && ( geometryCount == rhs.geometryCount ) - && ( pGeometries == rhs.pGeometries ); + return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( flags == rhs.flags ) && + ( instanceCount == rhs.instanceCount ) && ( geometryCount == rhs.geometryCount ) && + ( pGeometries == rhs.pGeometries ); } - bool operator!=( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {}; - VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {}; - uint32_t instanceCount = {}; - uint32_t geometryCount = {}; - const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {}; + uint32_t instanceCount = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries = {}; }; - static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct AccelerationStructureCreateInfoNV { - VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( + VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {} ) VULKAN_HPP_NOEXCEPT : compactedSize( compactedSize_ ) , info( info_ ) {} AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureCreateInfoNV ) - offsetof( AccelerationStructureCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AccelerationStructureCreateInfoNV ) - offsetof( AccelerationStructureCreateInfoNV, pNext ) ); return *this; } @@ -22097,140 +27419,152 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AccelerationStructureCreateInfoNV& operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoNV & + setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT { compactedSize = compactedSize_; return *this; } - AccelerationStructureCreateInfoNV & setInfo( 
VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoNV & + setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT { info = info_; return *this; } - operator VkAccelerationStructureCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default; #else - bool operator==( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( compactedSize == rhs.compactedSize ) - && ( info == rhs.info ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) && + ( info == rhs.info ); } - bool operator!=( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {}; }; - static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureDeviceAddressInfoKHR { - VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT : accelerationStructure( accelerationStructure_ ) {} - AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureDeviceAddressInfoKHR & + operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureDeviceAddressInfoKHR ) - offsetof( AccelerationStructureDeviceAddressInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AccelerationStructureDeviceAddressInfoKHR ) - + offsetof( AccelerationStructureDeviceAddressInfoKHR, pNext ) ); return *this; } - AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - AccelerationStructureDeviceAddressInfoKHR& operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureDeviceAddressInfoKHR & + operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureDeviceAddressInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } - operator VkAccelerationStructureDeviceAddressInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const& ) const = default; -#else - bool operator==( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default; +# else + bool operator==( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( accelerationStructure == rhs.accelerationStructure ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ); } - bool operator!=( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + 
bool operator!=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; }; - static_assert( sizeof( AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureDeviceAddressInfoKHR ) == + sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct TransformMatrixKHR { - VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array,3> const& matrix_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + TransformMatrixKHR( std::array, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT : matrix( matrix_ ) {} @@ -22239,37 +27573,37 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - TransformMatrixKHR& operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT + TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - TransformMatrixKHR & setMatrix( std::array,3> matrix_ ) VULKAN_HPP_NOEXCEPT + TransformMatrixKHR & setMatrix( std::array, 3> matrix_ ) VULKAN_HPP_NOEXCEPT { matrix = matrix_; return *this; } - operator VkTransformMatrixKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( TransformMatrixKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TransformMatrixKHR const & ) const = default; #else - bool operator==( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( matrix == rhs.matrix ); } - bool operator!=( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -22278,17 +27612,19 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::ArrayWrapper2D matrix = {}; }; - static_assert( sizeof( TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct AccelerationStructureInstanceKHR { - VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, - uint32_t instanceCustomIndex_ = {}, - uint32_t mask_ = {}, - uint32_t instanceShaderBindingTableRecordOffset_ = {}, - VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, - uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureInstanceKHR( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, + uint32_t instanceCustomIndex_ = {}, + uint32_t mask_ = {}, + uint32_t instanceShaderBindingTableRecordOffset_ = {}, + VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, + uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT : transform( transform_ ) , instanceCustomIndex( instanceCustomIndex_ ) , mask( mask_ ) @@ -22302,13 +27638,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AccelerationStructureInstanceKHR& operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureInstanceKHR & + setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; @@ -22326,48 +27663,49 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AccelerationStructureInstanceKHR & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureInstanceKHR & + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; return *this; } - AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureInstanceKHR & + setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - flags = *reinterpret_cast(&flags_); + flags = *reinterpret_cast( &flags_ ); return *this; } - AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureInstanceKHR & + setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT { accelerationStructureReference = accelerationStructureReference_; return *this; } - operator VkAccelerationStructureInstanceKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureInstanceKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default; #else - bool operator==( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AccelerationStructureInstanceKHR const & rhs ) 
const VULKAN_HPP_NOEXCEPT
     {
-      return ( transform == rhs.transform )
-          && ( instanceCustomIndex == rhs.instanceCustomIndex )
-          && ( mask == rhs.mask )
-          && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
-          && ( flags == rhs.flags )
-          && ( accelerationStructureReference == rhs.accelerationStructureReference );
+      return ( transform == rhs.transform ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) &&
+             ( mask == rhs.mask ) &&
+             ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) &&
+             ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference );
     }

-    bool operator!=( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -22375,171 +27713,194 @@ namespace VULKAN_HPP_NAMESPACE

   public:
     VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {};
-    uint32_t instanceCustomIndex : 24;
-    uint32_t mask : 8;
-    uint32_t instanceShaderBindingTableRecordOffset : 24;
-    VkGeometryInstanceFlagsKHR flags : 8;
-    uint64_t accelerationStructureReference = {};
+    uint32_t                                 instanceCustomIndex : 24;
+    uint32_t                                 mask : 8;
+    uint32_t                                 instanceShaderBindingTableRecordOffset : 24;
+    VkGeometryInstanceFlagsKHR               flags : 8;
+    uint64_t                                 accelerationStructureReference = {};
   };
-  static_assert( sizeof( AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<AccelerationStructureInstanceKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureInstanceKHR>::value,
+                 "struct wrapper is not a standard layout!"
); #ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureMemoryRequirementsInfoKHR { - VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject, - VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ = VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT : type( type_ ) , buildType( buildType_ ) , accelerationStructure( accelerationStructure_ ) {} - AccelerationStructureMemoryRequirementsInfoKHR & operator=( AccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoKHR & + operator=( AccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) - offsetof( AccelerationStructureMemoryRequirementsInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) - + offsetof( AccelerationStructureMemoryRequirementsInfoKHR, pNext ) ); return *this; } - AccelerationStructureMemoryRequirementsInfoKHR( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoKHR( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - AccelerationStructureMemoryRequirementsInfoKHR& operator=( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoKHR & + operator=( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureMemoryRequirementsInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureMemoryRequirementsInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoKHR & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } - AccelerationStructureMemoryRequirementsInfoKHR & setBuildType( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoKHR & + setBuildType( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ ) VULKAN_HPP_NOEXCEPT { buildType = buildType_; return *this; } - AccelerationStructureMemoryRequirementsInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + 
AccelerationStructureMemoryRequirementsInfoKHR & setAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } - operator VkAccelerationStructureMemoryRequirementsInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureMemoryRequirementsInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureMemoryRequirementsInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureMemoryRequirementsInfoKHR const& ) const = default; -#else - bool operator==( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureMemoryRequirementsInfoKHR const & ) const = default; +# else + bool operator==( AccelerationStructureMemoryRequirementsInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( buildType == rhs.buildType ) - && ( accelerationStructure == rhs.accelerationStructure ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && + ( buildType == rhs.buildType ) && ( accelerationStructure == rhs.accelerationStructure ); } - bool operator!=( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AccelerationStructureMemoryRequirementsInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject; - VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType = VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type = + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject; + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType = + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost; VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; }; - static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) == + sizeof( VkAccelerationStructureMemoryRequirementsInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct AccelerationStructureMemoryRequirementsInfoNV { - VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT : type( type_ ) , accelerationStructure( accelerationStructure_ ) {} - AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoNV & + operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureMemoryRequirementsInfoNV ) - offsetof( AccelerationStructureMemoryRequirementsInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AccelerationStructureMemoryRequirementsInfoNV ) - + offsetof( AccelerationStructureMemoryRequirementsInfoNV, pNext ) ); return *this; } - AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - AccelerationStructureMemoryRequirementsInfoNV& operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoNV & + operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureMemoryRequirementsInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoNV & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } - AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } - operator VkAccelerationStructureMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
AccelerationStructureMemoryRequirementsInfoNV const & ) const = default;
 #else
-    bool operator==( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( type == rhs.type )
-          && ( accelerationStructure == rhs.accelerationStructure );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) &&
+             ( accelerationStructure == rhs.accelerationStructure );
     }

-    bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -22547,23 +27908,28 @@ namespace VULKAN_HPP_NAMESPACE

   public:
     const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = {};
-    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
+    const void *                                                         pNext                 = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV  type                  = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV                        accelerationStructure = {};
   };
-  static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<AccelerationStructureMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) ==
+                   sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureMemoryRequirementsInfoNV>::value,
+                 "struct wrapper is not a standard layout!"
); #ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureVersionKHR { - VULKAN_HPP_CONSTEXPR AccelerationStructureVersionKHR( const uint8_t* versionData_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionKHR( const uint8_t * versionData_ = {} ) VULKAN_HPP_NOEXCEPT : versionData( versionData_ ) {} AccelerationStructureVersionKHR & operator=( AccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureVersionKHR ) - offsetof( AccelerationStructureVersionKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AccelerationStructureVersionKHR ) - offsetof( AccelerationStructureVersionKHR, pNext ) ); return *this; } @@ -22572,65 +27938,65 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AccelerationStructureVersionKHR& operator=( VkAccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureVersionKHR & operator=( VkAccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureVersionKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureVersionKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureVersionKHR & setVersionData( const uint8_t* versionData_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureVersionKHR & setVersionData( const uint8_t * versionData_ ) VULKAN_HPP_NOEXCEPT { versionData = versionData_; return *this; } - operator VkAccelerationStructureVersionKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureVersionKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureVersionKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureVersionKHR const& ) const = default; -#else - bool operator==( AccelerationStructureVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureVersionKHR const & ) const = default; +# else + bool operator==( AccelerationStructureVersionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( versionData == rhs.versionData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( versionData == rhs.versionData ); } - bool operator!=( AccelerationStructureVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AccelerationStructureVersionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionKHR; - const void* pNext = {}; - const uint8_t* versionData = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionKHR; + const void * pNext = {}; + const uint8_t * versionData = {}; }; - static_assert( sizeof( AccelerationStructureVersionKHR ) == sizeof( VkAccelerationStructureVersionKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + static_assert( sizeof( AccelerationStructureVersionKHR ) == sizeof( VkAccelerationStructureVersionKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct AcquireNextImageInfoKHR { VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, - uint64_t timeout_ = {}, - VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + uint64_t timeout_ = {}, + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT : swapchain( swapchain_ ) , timeout( timeout_ ) @@ -22650,13 +28016,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AcquireNextImageInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -22692,46 +28058,43 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkAcquireNextImageInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AcquireNextImageInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AcquireNextImageInfoKHR const & ) const = default; #else - bool operator==( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchain == rhs.swapchain ) - && ( timeout == rhs.timeout ) - && ( semaphore == rhs.semaphore ) - && ( fence == rhs.fence ) - && ( deviceMask == rhs.deviceMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && + ( timeout == rhs.timeout ) && ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) && + ( deviceMask == rhs.deviceMask ); } - bool operator!=( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; - uint64_t timeout = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - uint32_t deviceMask = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint64_t timeout = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + uint32_t deviceMask = {}; }; - static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct 
and wrapper have different size!" ); + static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct AcquireProfilingLockInfoKHR @@ -22744,7 +28107,8 @@ namespace VULKAN_HPP_NAMESPACE AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AcquireProfilingLockInfoKHR ) - offsetof( AcquireProfilingLockInfoKHR, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( AcquireProfilingLockInfoKHR ) - offsetof( AcquireProfilingLockInfoKHR, pNext ) ); return *this; } @@ -22753,19 +28117,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AcquireProfilingLockInfoKHR& operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AcquireProfilingLockInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + AcquireProfilingLockInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -22777,48 +28142,47 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkAcquireProfilingLockInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AcquireProfilingLockInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default; #else - bool operator==( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( timeout == rhs.timeout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( timeout == rhs.timeout ); } - bool operator!=( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {}; - uint64_t timeout = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {}; + uint64_t timeout = {}; }; - static_assert( sizeof( AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct AllocationCallbacks { - VULKAN_HPP_CONSTEXPR AllocationCallbacks( void* pUserData_ = {}, - PFN_vkAllocationFunction pfnAllocation_ = {}, - PFN_vkReallocationFunction pfnReallocation_ = {}, - PFN_vkFreeFunction pfnFree_ = {}, + VULKAN_HPP_CONSTEXPR AllocationCallbacks( void * pUserData_ = {}, + PFN_vkAllocationFunction pfnAllocation_ = {}, + PFN_vkReallocationFunction pfnReallocation_ = {}, + PFN_vkFreeFunction pfnFree_ = {}, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT : pUserData( pUserData_ ) @@ -22834,13 +28198,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT + AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AllocationCallbacks & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT + AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; @@ -22864,7 +28228,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT + AllocationCallbacks & + setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT { pfnInternalAllocation = pfnInternalAllocation_; return *this; @@ -22876,52 +28241,52 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkAllocationCallbacks const&() const VULKAN_HPP_NOEXCEPT + operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AllocationCallbacks const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AllocationCallbacks const & ) const = default; #else - bool operator==( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( pUserData == rhs.pUserData ) - && ( pfnAllocation == rhs.pfnAllocation ) - && ( pfnReallocation == rhs.pfnReallocation ) - && ( pfnFree == rhs.pfnFree ) - && ( pfnInternalAllocation == rhs.pfnInternalAllocation ) - && ( pfnInternalFree == rhs.pfnInternalFree ); + return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) && + ( pfnReallocation == rhs.pfnReallocation ) && ( pfnFree == rhs.pfnFree ) && + ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( pfnInternalFree == rhs.pfnInternalFree ); } - bool operator!=( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - void* pUserData = {}; - PFN_vkAllocationFunction pfnAllocation = {}; - PFN_vkReallocationFunction 
pfnReallocation = {}; - PFN_vkFreeFunction pfnFree = {}; + void * pUserData = {}; + PFN_vkAllocationFunction pfnAllocation = {}; + PFN_vkReallocationFunction pfnReallocation = {}; + PFN_vkFreeFunction pfnFree = {}; PFN_vkInternalAllocationNotification pfnInternalAllocation = {}; - PFN_vkInternalFreeNotification pfnInternalFree = {}; + PFN_vkInternalFreeNotification pfnInternalFree = {}; }; - static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" ); + static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct ComponentMapping { - VULKAN_HPP_CONSTEXPR ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, - VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, - VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, - VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) + VULKAN_HPP_NOEXCEPT : r( r_ ) , g( g_ ) , b( b_ ) @@ -22933,9 +28298,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ComponentMapping& operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT + ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -22963,28 +28328,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkComponentMapping const&() const VULKAN_HPP_NOEXCEPT + operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ComponentMapping const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComponentMapping const & ) const = default; #else - bool operator==( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( r == rhs.r ) - && ( g == rhs.g ) - && ( b == rhs.b ) - && ( a == rhs.a ); + return ( r == rhs.r ) && ( g == rhs.g ) && ( b == rhs.b ) && ( a == rhs.a ); } - bool operator!=( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -22996,20 +28358,24 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; }; - static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" 
); + static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_ANDROID_KHR struct AndroidHardwareBufferFormatPropertiesANDROID { - VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - uint64_t externalFormat_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, - VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, - VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, - VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, - VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, - VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint64_t externalFormat_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = + VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT : format( format_ ) , externalFormat( externalFormat_ ) , formatFeatures( formatFeatures_ ) @@ -23020,70 +28386,76 @@ namespace VULKAN_HPP_NAMESPACE , suggestedYChromaOffset( suggestedYChromaOffset_ ) {} - AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + AndroidHardwareBufferFormatPropertiesANDROID & + operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) - offsetof( AndroidHardwareBufferFormatPropertiesANDROID, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) - + offsetof( AndroidHardwareBufferFormatPropertiesANDROID, pNext ) ); return *this; } - AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - AndroidHardwareBufferFormatPropertiesANDROID& operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + AndroidHardwareBufferFormatPropertiesANDROID & + operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkAndroidHardwareBufferFormatPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT 
+ operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const& ) const = default; -#else - bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( externalFormat == rhs.externalFormat ) - && ( formatFeatures == rhs.formatFeatures ) - && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) - && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) - && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) - && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) - && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && + ( externalFormat == rhs.externalFormat ) && ( formatFeatures == rhs.formatFeatures ) && + ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && + ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && + ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && + ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); } - bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - uint64_t externalFormat = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; - VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; - VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; - VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; - VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; - VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint64_t externalFormat = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange 
suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; }; - static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == + sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR @@ -23095,9 +28467,13 @@ namespace VULKAN_HPP_NAMESPACE , memoryTypeBits( memoryTypeBits_ ) {} - AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + AndroidHardwareBufferPropertiesANDROID & + operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AndroidHardwareBufferPropertiesANDROID ) - offsetof( AndroidHardwareBufferPropertiesANDROID, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AndroidHardwareBufferPropertiesANDROID ) - + offsetof( AndroidHardwareBufferPropertiesANDROID, pNext ) ); return *this; } @@ -23106,59 +28482,63 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AndroidHardwareBufferPropertiesANDROID& operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + AndroidHardwareBufferPropertiesANDROID & + operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkAndroidHardwareBufferPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT + operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AndroidHardwareBufferPropertiesANDROID const& ) const = default; -#else - bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( allocationSize == rhs.allocationSize ) - && ( memoryTypeBits == rhs.memoryTypeBits ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && + ( memoryTypeBits == rhs.memoryTypeBits ); } - bool operator!=( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eAndroidHardwareBufferPropertiesANDROID; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; - uint32_t memoryTypeBits = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeBits = {}; }; - static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR struct AndroidHardwareBufferUsageANDROID { - VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {} ) VULKAN_HPP_NOEXCEPT : androidHardwareBufferUsage( androidHardwareBufferUsage_ ) {} AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AndroidHardwareBufferUsageANDROID ) - offsetof( AndroidHardwareBufferUsageANDROID, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AndroidHardwareBufferUsageANDROID ) - offsetof( AndroidHardwareBufferUsageANDROID, pNext ) ); return *this; } @@ -23167,59 +28547,61 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AndroidHardwareBufferUsageANDROID& operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkAndroidHardwareBufferUsageANDROID const&() const VULKAN_HPP_NOEXCEPT + operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AndroidHardwareBufferUsageANDROID const& ) const = default; -#else - bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); } - bool operator!=( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eAndroidHardwareBufferUsageANDROID; - void* pNext = {}; - uint64_t androidHardwareBufferUsage = {}; + void * pNext = {}; + uint64_t androidHardwareBufferUsage = {}; }; - static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR struct AndroidSurfaceCreateInfoKHR { VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, - struct ANativeWindow* window_ = {} ) VULKAN_HPP_NOEXCEPT + struct ANativeWindow * window_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , window( window_ ) {} AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AndroidSurfaceCreateInfoKHR ) - offsetof( AndroidSurfaceCreateInfoKHR, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( AndroidSurfaceCreateInfoKHR ) - offsetof( AndroidSurfaceCreateInfoKHR, pNext ) ); return *this; } @@ -23228,74 +28610,74 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AndroidSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + AndroidSurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow* window_ ) VULKAN_HPP_NOEXCEPT + AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT { window = window_; return *this; } - operator VkAndroidSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AndroidSurfaceCreateInfoKHR const& ) const = default; -#else - bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( window == rhs.window ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window 
); } - bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {}; - struct ANativeWindow* window = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {}; + struct ANativeWindow * window = {}; }; - static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ struct ApplicationInfo { - VULKAN_HPP_CONSTEXPR ApplicationInfo( const char* pApplicationName_ = {}, - uint32_t applicationVersion_ = {}, - const char* pEngineName_ = {}, - uint32_t engineVersion_ = {}, - uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ApplicationInfo( const char * pApplicationName_ = {}, + uint32_t applicationVersion_ = {}, + const char * pEngineName_ = {}, + uint32_t engineVersion_ = {}, + uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT : pApplicationName( pApplicationName_ ) , applicationVersion( applicationVersion_ ) , pEngineName( pEngineName_ ) @@ -23314,19 +28696,19 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ApplicationInfo& operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ApplicationInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ApplicationInfo & setPApplicationName( const char* pApplicationName_ ) VULKAN_HPP_NOEXCEPT + ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) VULKAN_HPP_NOEXCEPT { pApplicationName = pApplicationName_; return *this; @@ -23338,7 +28720,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ApplicationInfo & setPEngineName( const char* pEngineName_ ) VULKAN_HPP_NOEXCEPT + ApplicationInfo & setPEngineName( const char * pEngineName_ ) VULKAN_HPP_NOEXCEPT { pEngineName = pEngineName_; return *this; @@ -23356,59 +28738,57 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkApplicationInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ApplicationInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ApplicationInfo const & ) const = default; #else - bool operator==( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool 
operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pApplicationName == rhs.pApplicationName ) - && ( applicationVersion == rhs.applicationVersion ) - && ( pEngineName == rhs.pEngineName ) - && ( engineVersion == rhs.engineVersion ) - && ( apiVersion == rhs.apiVersion ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pApplicationName == rhs.pApplicationName ) && + ( applicationVersion == rhs.applicationVersion ) && ( pEngineName == rhs.pEngineName ) && + ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion ); } - bool operator!=( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo; - const void* pNext = {}; - const char* pApplicationName = {}; - uint32_t applicationVersion = {}; - const char* pEngineName = {}; - uint32_t engineVersion = {}; - uint32_t apiVersion = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo; + const void * pNext = {}; + const char * pApplicationName = {}; + uint32_t applicationVersion = {}; + const char * pEngineName = {}; + uint32_t engineVersion = {}; + uint32_t apiVersion = {}; }; static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct AttachmentDescription { - VULKAN_HPP_CONSTEXPR AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AttachmentDescription( + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) , format( format_ ) , samples( samples_ ) @@ 
-23425,9 +28805,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AttachmentDescription& operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -23461,13 +28841,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescription & + setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT { stencilLoadOp = stencilLoadOp_; return *this; } - AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescription & + setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT { stencilStoreOp = stencilStoreOp_; return *this; @@ -23485,63 +28867,61 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkAttachmentDescription const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentDescription const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescription const & ) const = default; #else - bool operator==( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) - && ( format == rhs.format ) - && ( samples == rhs.samples ) - && ( loadOp == rhs.loadOp ) - && ( storeOp == rhs.storeOp ) - && ( stencilLoadOp == rhs.stencilLoadOp ) - && ( stencilStoreOp == rhs.stencilStoreOp ) - && ( initialLayout == rhs.initialLayout ) - && ( finalLayout == rhs.finalLayout ); + return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && + ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) && + ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) && + ( finalLayout == rhs.finalLayout ); } - bool operator!=( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; - VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; - VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; - VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + 
VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; - static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" ); + static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct AttachmentDescription2 { - VULKAN_HPP_CONSTEXPR AttachmentDescription2( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AttachmentDescription2( + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) , format( format_ ) , samples( samples_ ) @@ -23564,13 +28944,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AttachmentDescription2& operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AttachmentDescription2 & setPNext( 
const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -23606,13 +28986,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AttachmentDescription2 & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescription2 & + setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT { stencilLoadOp = stencilLoadOp_; return *this; } - AttachmentDescription2 & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescription2 & + setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT { stencilStoreOp = stencilStoreOp_; return *this; @@ -23630,67 +29012,65 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkAttachmentDescription2 const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentDescription2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescription2 const & ) const = default; #else - bool operator==( AttachmentDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( format == rhs.format ) - && ( samples == rhs.samples ) - && ( loadOp == rhs.loadOp ) - && ( storeOp == rhs.storeOp ) - && ( stencilLoadOp == rhs.stencilLoadOp ) - && ( stencilStoreOp == rhs.stencilStoreOp ) - && ( initialLayout == rhs.initialLayout ) - && ( finalLayout == rhs.finalLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( format == rhs.format ) && + ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && + ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && + ( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout ); } - bool operator!=( AttachmentDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; - VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; - VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; - VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = 
VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; - static_assert( sizeof( AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct AttachmentDescriptionStencilLayout { - VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( + VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT : stencilInitialLayout( stencilInitialLayout_ ) , stencilFinalLayout( stencilFinalLayout_ ) {} AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AttachmentDescriptionStencilLayout ) - offsetof( AttachmentDescriptionStencilLayout, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AttachmentDescriptionStencilLayout ) - offsetof( AttachmentDescriptionStencilLayout, pNext ) ); return *this; } @@ -23699,70 +29079,74 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AttachmentDescriptionStencilLayout& operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentDescriptionStencilLayout & + operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AttachmentDescriptionStencilLayout & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AttachmentDescriptionStencilLayout & setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescriptionStencilLayout & + setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT { stencilInitialLayout = stencilInitialLayout_; return *this; } - AttachmentDescriptionStencilLayout & setStencilFinalLayout( 
VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescriptionStencilLayout & + setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT { stencilFinalLayout = stencilFinalLayout_; return *this; } - operator VkAttachmentDescriptionStencilLayout const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentDescriptionStencilLayout const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default; #else - bool operator==( AttachmentDescriptionStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stencilInitialLayout == rhs.stencilInitialLayout ) - && ( stencilFinalLayout == rhs.stencilFinalLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilInitialLayout == rhs.stencilInitialLayout ) && + ( stencilFinalLayout == rhs.stencilFinalLayout ); } - bool operator!=( AttachmentDescriptionStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; - static_assert( sizeof( AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct AttachmentReference { - VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AttachmentReference( + uint32_t attachment_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT : attachment( attachment_ ) , layout( layout_ ) {} @@ -23772,9 +29156,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AttachmentReference& operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -23790,43 +29174,44 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkAttachmentReference const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentReference const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReference const & ) const = default; #else - bool operator==( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( attachment == rhs.attachment ) - && ( layout == rhs.layout ); + return ( attachment == rhs.attachment ) && ( layout == rhs.layout ); } - bool operator!=( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t attachment = {}; - VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t attachment = {}; + VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; - static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" ); + static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct AttachmentReference2 { - VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t attachment_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + AttachmentReference2( uint32_t attachment_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT : attachment( attachment_ ) , layout( layout_ ) , aspectMask( aspectMask_ ) @@ -23843,13 +29228,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AttachmentReference2& operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AttachmentReference2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -23873,53 +29258,55 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkAttachmentReference2 const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentReference2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReference2 const & ) const = default; #else - bool operator==( AttachmentReference2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( attachment == rhs.attachment ) - && ( layout == rhs.layout ) - && ( aspectMask == rhs.aspectMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) && + ( layout == rhs.layout ) && ( aspectMask == rhs.aspectMask ); } - bool operator!=( AttachmentReference2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2; - const void* pNext = {}; - uint32_t attachment = {}; - VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2; + const void * pNext = {}; + uint32_t attachment = {}; + VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; }; - static_assert( sizeof( AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct AttachmentReferenceStencilLayout { - VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = + VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT : stencilLayout( stencilLayout_ ) {} AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AttachmentReferenceStencilLayout ) - offsetof( AttachmentReferenceStencilLayout, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( AttachmentReferenceStencilLayout ) - offsetof( AttachmentReferenceStencilLayout, pNext ) ); return *this; } @@ -23928,62 +29315,62 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AttachmentReferenceStencilLayout& operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AttachmentReferenceStencilLayout & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AttachmentReferenceStencilLayout & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT + AttachmentReferenceStencilLayout & + setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT { stencilLayout = stencilLayout_; return *this; } - operator VkAttachmentReferenceStencilLayout const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentReferenceStencilLayout const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default; #else - bool operator==( AttachmentReferenceStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stencilLayout == rhs.stencilLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilLayout == rhs.stencilLayout ); } - bool operator!=( AttachmentReferenceStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; - static_assert( sizeof( AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), 
"struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct Extent2D { - VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, - uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT : width( width_ ) , height( height_ ) {} @@ -23993,9 +29380,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - Extent2D& operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT + Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -24011,33 +29398,32 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkExtent2D const&() const VULKAN_HPP_NOEXCEPT + operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExtent2D &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Extent2D const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Extent2D const & ) const = default; #else - bool operator==( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( width == rhs.width ) - && ( height == rhs.height ); + return ( width == rhs.width ) && ( height == rhs.height ); } - bool operator!=( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t width = {}; + uint32_t width = {}; uint32_t height = {}; }; static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" 
); @@ -24045,8 +29431,7 @@ namespace VULKAN_HPP_NAMESPACE struct SampleLocationEXT { - VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, - float y_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT : x( x_ ) , y( y_ ) {} @@ -24056,9 +29441,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SampleLocationEXT& operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -24074,26 +29459,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkSampleLocationEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SampleLocationEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SampleLocationEXT const & ) const = default; #else - bool operator==( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( x == rhs.x ) - && ( y == rhs.y ); + return ( x == rhs.x ) && ( y == rhs.y ); } - bool operator!=( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -24103,15 +29487,18 @@ namespace VULKAN_HPP_NAMESPACE float x = {}; float y = {}; }; - static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
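  // Illustrative sketch (not part of the generated header or of this patch): the small
  // wrappers above stay constexpr-constructible after reformatting, so values can be
  // built at compile time (again assuming the default `vk` namespace alias).
  constexpr vk::Extent2D          sampleGrid( 2, 2 );
  constexpr vk::SampleLocationEXT pixelCenter( 0.5f, 0.5f );
  static_assert( sampleGrid.width == 2, "constexpr construction of vk::Extent2D" );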
); struct SampleLocationsInfoEXT { - VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, - uint32_t sampleLocationsCount_ = {}, - const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, + uint32_t sampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT : sampleLocationsPerPixel( sampleLocationsPerPixel_ ) , sampleLocationGridSize( sampleLocationGridSize_ ) , sampleLocationsCount( sampleLocationsCount_ ) @@ -24129,25 +29516,27 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SampleLocationsInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SampleLocationsInfoEXT & setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT + SampleLocationsInfoEXT & setSampleLocationsPerPixel( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsPerPixel = sampleLocationsPerPixel_; return *this; } - SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT + SampleLocationsInfoEXT & + setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT { sampleLocationGridSize = sampleLocationGridSize_; return *this; @@ -24159,56 +29548,57 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SampleLocationsInfoEXT & setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + SampleLocationsInfoEXT & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT { pSampleLocations = pSampleLocations_; return *this; } - operator VkSampleLocationsInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SampleLocationsInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SampleLocationsInfoEXT const & ) const = default; #else - bool operator==( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) - && ( sampleLocationGridSize == rhs.sampleLocationGridSize ) - && ( sampleLocationsCount == 
rhs.sampleLocationsCount ) - && ( pSampleLocations == rhs.pSampleLocations ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) && + ( sampleLocationGridSize == rhs.sampleLocationGridSize ) && + ( sampleLocationsCount == rhs.sampleLocationsCount ) && ( pSampleLocations == rhs.pSampleLocations ); } - bool operator!=( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {}; - uint32_t sampleLocationsCount = {}; - const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations = {}; + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {}; + uint32_t sampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations = {}; }; - static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct AttachmentSampleLocationsEXT { - VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {}, - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( + uint32_t attachmentIndex_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT : attachmentIndex( attachmentIndex_ ) , sampleLocationsInfo( sampleLocationsInfo_ ) {} @@ -24218,9 +29608,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - AttachmentSampleLocationsEXT& operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -24230,159 +29620,159 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + AttachmentSampleLocationsEXT & setSampleLocationsInfo( + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsInfo = sampleLocationsInfo_; return *this; } - operator VkAttachmentSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentSampleLocationsEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentSampleLocationsEXT const & ) 
const = default; #else - bool operator==( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( attachmentIndex == rhs.attachmentIndex ) - && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + return ( attachmentIndex == rhs.attachmentIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); } - bool operator!=( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t attachmentIndex = {}; + uint32_t attachmentIndex = {}; VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; }; - static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct BaseInStructure { - BaseInStructure() VULKAN_HPP_NOEXCEPT - {} + BaseInStructure() VULKAN_HPP_NOEXCEPT {} BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - BaseInStructure& operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT + BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext_ ) VULKAN_HPP_NOEXCEPT + BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - operator VkBaseInStructure const&() const VULKAN_HPP_NOEXCEPT + operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BaseInStructure const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BaseInStructure const & ) const = default; #else - bool operator==( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); } - bool operator!=( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = {}; - const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = {}; + const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext = {}; }; static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
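  // Illustrative sketch (the 2x2 sample positions are example values only, not taken
  // from this patch): the reflowed setter chains of SampleLocationsInfoEXT above are
  // typically used like this, and the result is attached to an attachment index through
  // AttachmentSampleLocationsEXT.
  vk::SampleLocationEXT positions[4] = { { 0.25f, 0.25f }, { 0.75f, 0.25f },
                                         { 0.25f, 0.75f }, { 0.75f, 0.75f } };
  vk::SampleLocationsInfoEXT sampleLocationsInfo;
  sampleLocationsInfo.setSampleLocationsPerPixel( vk::SampleCountFlagBits::e4 )
    .setSampleLocationGridSize( vk::Extent2D( 1, 1 ) )
    .setSampleLocationsCount( 4 )
    .setPSampleLocations( positions );
  vk::AttachmentSampleLocationsEXT attachmentLocations( 0, sampleLocationsInfo );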
); struct BaseOutStructure { - BaseOutStructure() VULKAN_HPP_NOEXCEPT - {} + BaseOutStructure() VULKAN_HPP_NOEXCEPT {} BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - BaseOutStructure& operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT + BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext_ ) VULKAN_HPP_NOEXCEPT + BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - operator VkBaseOutStructure const&() const VULKAN_HPP_NOEXCEPT + operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BaseOutStructure const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BaseOutStructure const & ) const = default; #else - bool operator==( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); } - bool operator!=( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = {}; - struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = {}; + struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext = {}; }; - static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" ); + static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
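  // Illustrative sketch (not part of this patch; `features2` is an assumed
  // vk::PhysicalDeviceFeatures2 whose pNext already carries a chain of extension
  // structures): BaseOutStructure, shown above, is the generic node type such a
  // pNext chain is walked through.
  vk::PhysicalDeviceFeatures2 features2 = {};
  auto * node = reinterpret_cast<vk::BaseOutStructure *>( features2.pNext );
  while ( node )
  {
    // node->sType identifies the extension structure at this link of the chain
    node = node->pNext;
  }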
); struct BindAccelerationStructureMemoryInfoKHR { - VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, - uint32_t deviceIndexCount_ = {}, - const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT : accelerationStructure( accelerationStructure_ ) , memory( memory_ ) , memoryOffset( memoryOffset_ ) @@ -24390,9 +29780,13 @@ namespace VULKAN_HPP_NAMESPACE , pDeviceIndices( pDeviceIndices_ ) {} - BindAccelerationStructureMemoryInfoKHR & operator=( BindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & + operator=( BindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( BindAccelerationStructureMemoryInfoKHR ) - offsetof( BindAccelerationStructureMemoryInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( BindAccelerationStructureMemoryInfoKHR ) - + offsetof( BindAccelerationStructureMemoryInfoKHR, pNext ) ); return *this; } @@ -24401,19 +29795,21 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BindAccelerationStructureMemoryInfoKHR& operator=( VkBindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & + operator=( VkBindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindAccelerationStructureMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - BindAccelerationStructureMemoryInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; @@ -24425,7 +29821,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BindAccelerationStructureMemoryInfoKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & + setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; @@ -24437,65 +29834,66 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BindAccelerationStructureMemoryInfoKHR & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } - operator VkBindAccelerationStructureMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkBindAccelerationStructureMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator 
VkBindAccelerationStructureMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindAccelerationStructureMemoryInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindAccelerationStructureMemoryInfoKHR const & ) const = default; #else - bool operator==( BindAccelerationStructureMemoryInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindAccelerationStructureMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( accelerationStructure == rhs.accelerationStructure ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ) - && ( deviceIndexCount == rhs.deviceIndexCount ) - && ( pDeviceIndices == rhs.pDeviceIndices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( accelerationStructure == rhs.accelerationStructure ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( deviceIndexCount == rhs.deviceIndexCount ) && + ( pDeviceIndices == rhs.pDeviceIndices ); } - bool operator!=( BindAccelerationStructureMemoryInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindAccelerationStructureMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoKHR; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoKHR; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; - uint32_t deviceIndexCount = {}; - const uint32_t* pDeviceIndices = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; }; - static_assert( sizeof( BindAccelerationStructureMemoryInfoKHR ) == sizeof( VkBindAccelerationStructureMemoryInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BindAccelerationStructureMemoryInfoKHR ) == sizeof( VkBindAccelerationStructureMemoryInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
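  // Illustrative sketch (not part of this patch; `accelerationStructure` and `memory`
  // are assumed, previously created handles): the reformatted setters above still chain
  // in the usual vulkan.hpp style.
  vk::BindAccelerationStructureMemoryInfoKHR bindAccelInfo;
  bindAccelInfo.setAccelerationStructure( accelerationStructure )
    .setMemory( memory )
    .setMemoryOffset( 0 );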
); struct BindBufferMemoryDeviceGroupInfo { - VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, - const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT : deviceIndexCount( deviceIndexCount_ ) , pDeviceIndices( pDeviceIndices_ ) {} BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( BindBufferMemoryDeviceGroupInfo ) - offsetof( BindBufferMemoryDeviceGroupInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( BindBufferMemoryDeviceGroupInfo ) - offsetof( BindBufferMemoryDeviceGroupInfo, pNext ) ); return *this; } @@ -24504,13 +29902,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindBufferMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -24522,52 +29920,52 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } - operator VkBindBufferMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindBufferMemoryDeviceGroupInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default; #else - bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceIndexCount == rhs.deviceIndexCount ) - && ( pDeviceIndices == rhs.pDeviceIndices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && + ( pDeviceIndices == rhs.pDeviceIndices ); } - bool operator!=( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; - const void* pNext = {}; - uint32_t deviceIndexCount = {}; - const uint32_t* pDeviceIndices = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; + const void * pNext = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; }; - static_assert( sizeof( 
BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct BindBufferMemoryInfo { - VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT : buffer( buffer_ ) , memory( memory_ ) @@ -24585,13 +29983,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindBufferMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -24615,48 +30013,45 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkBindBufferMemoryInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindBufferMemoryInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindBufferMemoryInfo const & ) const = default; #else - bool operator==( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ); } - bool operator!=( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; }; - static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" 
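  // Illustrative sketch (not part of this patch; assumes existing vk::Device `device`,
  // vk::Buffer `buffer` and vk::DeviceMemory `memory` handles): BindBufferMemoryInfo
  // from above is what Device::bindBufferMemory2 consumes, one entry per
  // buffer/memory pair.
  vk::BindBufferMemoryInfo bindBufferInfo( buffer, memory, 0 );
  device.bindBufferMemory2( bindBufferInfo );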
); + static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct Offset2D { - VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, - int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT : x( x_ ) , y( y_ ) {} @@ -24666,9 +30061,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - Offset2D& operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT + Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -24684,26 +30079,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkOffset2D const&() const VULKAN_HPP_NOEXCEPT + operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkOffset2D &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Offset2D const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Offset2D const & ) const = default; #else - bool operator==( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( x == rhs.x ) - && ( y == rhs.y ); + return ( x == rhs.x ) && ( y == rhs.y ); } - bool operator!=( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -24729,9 +30123,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - Rect2D& operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT + Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -24747,26 +30141,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkRect2D const&() const VULKAN_HPP_NOEXCEPT + operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRect2D &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Rect2D const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Rect2D const & ) const = default; #else - bool operator==( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( offset == rhs.offset ) - && ( extent == rhs.extent ); + return ( offset == rhs.offset ) && ( extent == rhs.extent ); } - bool operator!=( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -24781,10 +30174,11 @@ namespace VULKAN_HPP_NAMESPACE struct BindImageMemoryDeviceGroupInfo { - VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, - const uint32_t* pDeviceIndices_ = {}, - uint32_t splitInstanceBindRegionCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( + uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {}, + uint32_t 
splitInstanceBindRegionCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {} ) VULKAN_HPP_NOEXCEPT : deviceIndexCount( deviceIndexCount_ ) , pDeviceIndices( pDeviceIndices_ ) , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ) @@ -24793,7 +30187,9 @@ namespace VULKAN_HPP_NAMESPACE BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( BindImageMemoryDeviceGroupInfo ) - offsetof( BindImageMemoryDeviceGroupInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( BindImageMemoryDeviceGroupInfo ) - offsetof( BindImageMemoryDeviceGroupInfo, pNext ) ); return *this; } @@ -24802,13 +30198,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindImageMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -24820,68 +30216,70 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } - BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT + BindImageMemoryDeviceGroupInfo & + setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT { splitInstanceBindRegionCount = splitInstanceBindRegionCount_; return *this; } - BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT + BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT { pSplitInstanceBindRegions = pSplitInstanceBindRegions_; return *this; } - operator VkBindImageMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindImageMemoryDeviceGroupInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default; #else - bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceIndexCount == rhs.deviceIndexCount ) - && ( pDeviceIndices == rhs.pDeviceIndices ) - && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) - && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount 
== rhs.deviceIndexCount ) && + ( pDeviceIndices == rhs.pDeviceIndices ) && + ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) && + ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); } - bool operator!=( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; - const void* pNext = {}; - uint32_t deviceIndexCount = {}; - const uint32_t* pDeviceIndices = {}; - uint32_t splitInstanceBindRegionCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; + const void * pNext = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + uint32_t splitInstanceBindRegionCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions = {}; }; - static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct BindImageMemoryInfo { - VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT : image( image_ ) , memory( memory_ ) @@ -24899,13 +30297,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindImageMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -24929,42 +30327,40 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkBindImageMemoryInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindImageMemoryInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemoryInfo const & ) const = default; #else - bool operator==( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image 
== rhs.image ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ); } - bool operator!=( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; }; - static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct BindImageMemorySwapchainInfoKHR @@ -24977,7 +30373,9 @@ namespace VULKAN_HPP_NAMESPACE BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( BindImageMemorySwapchainInfoKHR ) - offsetof( BindImageMemorySwapchainInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( BindImageMemorySwapchainInfoKHR ) - offsetof( BindImageMemorySwapchainInfoKHR, pNext ) ); return *this; } @@ -24986,13 +30384,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindImageMemorySwapchainInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -25010,45 +30408,47 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkBindImageMemorySwapchainInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindImageMemorySwapchainInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default; #else - bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchain == rhs.swapchain ) - && ( imageIndex == rhs.imageIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && + ( imageIndex == rhs.imageIndex ); } - bool operator!=( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindImageMemorySwapchainInfoKHR 
const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; - uint32_t imageIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint32_t imageIndex = {}; }; - static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct BindImagePlaneMemoryInfo { - VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT : planeAspect( planeAspect_ ) {} @@ -25063,63 +30463,64 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindImagePlaneMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + BindImagePlaneMemoryInfo & + setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT { planeAspect = planeAspect_; return *this; } - operator VkBindImagePlaneMemoryInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindImagePlaneMemoryInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default; #else - bool operator==( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( planeAspect == rhs.planeAspect ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); } - bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eBindImagePlaneMemoryInfo; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; }; - static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct BindIndexBufferIndirectCommandNV { - VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, - uint32_t size_ = {}, - VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT : bufferAddress( bufferAddress_ ) , size( size_ ) , indexType( indexType_ ) @@ -25130,13 +30531,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BindIndexBufferIndirectCommandNV& operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + BindIndexBufferIndirectCommandNV & + setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return *this; @@ -25154,27 +30556,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkBindIndexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT + operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindIndexBufferIndirectCommandNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindIndexBufferIndirectCommandNV const & ) const = default; #else - bool operator==( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( bufferAddress == rhs.bufferAddress ) - && ( size == rhs.size ) - && ( indexType == rhs.indexType ); + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType ); } - bool operator!=( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -25182,11 +30582,13 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; - uint32_t size = {}; - VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; 
+ uint32_t size = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; }; - static_assert( sizeof( BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct BindShaderGroupIndirectCommandNV { @@ -25199,9 +30601,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BindShaderGroupIndirectCommandNV& operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -25211,25 +30613,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkBindShaderGroupIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT + operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindShaderGroupIndirectCommandNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default; #else - bool operator==( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( groupIndex == rhs.groupIndex ); } - bool operator!=( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -25238,15 +30640,17 @@ namespace VULKAN_HPP_NAMESPACE public: uint32_t groupIndex = {}; }; - static_assert( sizeof( BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
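  // Illustrative sketch (not part of this patch; assumes existing vk::Device `device`,
  // a disjoint multi-planar vk::Image `image` and a vk::DeviceMemory `planeMemory`):
  // the BindImageMemoryInfo and BindImagePlaneMemoryInfo wrappers from the hunks above
  // are commonly combined through vk::StructureChain before calling
  // Device::bindImageMemory2.
  vk::StructureChain<vk::BindImageMemoryInfo, vk::BindImagePlaneMemoryInfo> bindImageChain(
    vk::BindImageMemoryInfo( image, planeMemory, 0 ),
    vk::BindImagePlaneMemoryInfo( vk::ImageAspectFlagBits::ePlane0 ) );
  device.bindImageMemory2( bindImageChain.get<vk::BindImageMemoryInfo>() );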
); struct SparseMemoryBind { - VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT : resourceOffset( resourceOffset_ ) , size( size_ ) @@ -25260,9 +30664,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -25296,49 +30700,48 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkSparseMemoryBind const&() const VULKAN_HPP_NOEXCEPT + operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseMemoryBind const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseMemoryBind const & ) const = default; #else - bool operator==( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( resourceOffset == rhs.resourceOffset ) - && ( size == rhs.size ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ) - && ( flags == rhs.flags ); + return ( resourceOffset == rhs.resourceOffset ) && ( size == rhs.size ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags ); } - bool operator!=( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; - VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; }; - static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" ); + static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SparseBufferMemoryBindInfo { - VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - uint32_t bindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT : buffer( buffer_ ) , bindCount( bindCount_ ) , pBinds( pBinds_ ) @@ -25349,9 +30752,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -25367,51 +30770,52 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT + SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return *this; } - operator VkSparseBufferMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseBufferMemoryBindInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default; #else - bool operator==( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( buffer == rhs.buffer ) - && ( bindCount == rhs.bindCount ) - && ( pBinds == rhs.pBinds ); + return ( buffer == rhs.buffer ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); } - bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - uint32_t bindCount = {}; - const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {}; }; - static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
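  // Illustrative sketch (not part of this patch; `sparseBuffer`, `memory` and `bindSize`
  // are assumed to come from earlier setup): SparseMemoryBind and
  // SparseBufferMemoryBindInfo above are filled like this before being queued through
  // vk::Queue::bindSparse.
  vk::SparseMemoryBind           memoryBind( 0, bindSize, memory, 0, {} );
  vk::SparseBufferMemoryBindInfo bufferBindInfo( sparseBuffer, 1, &memoryBind );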
); struct SparseImageOpaqueMemoryBindInfo { - VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, - uint32_t bindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT : image( image_ ) , bindCount( bindCount_ ) , pBinds( pBinds_ ) @@ -25422,9 +30826,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -25440,51 +30844,52 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT + SparseImageOpaqueMemoryBindInfo & + setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return *this; } - operator VkSparseImageOpaqueMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseImageOpaqueMemoryBindInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default; #else - bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( image == rhs.image ) - && ( bindCount == rhs.bindCount ) - && ( pBinds == rhs.pBinds ); + return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); } - bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Image image = {}; - uint32_t bindCount = {}; - const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {}; }; - static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);

   struct ImageSubresource
   {
     VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
-                                           uint32_t mipLevel_ = {},
-                                           uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT
+                                           uint32_t mipLevel_ = {},
+                                           uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT
       : aspectMask( aspectMask_ )
       , mipLevel( mipLevel_ )
       , arrayLayer( arrayLayer_ )
@@ -25495,9 +30900,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    ImageSubresource& operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
+    ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>( &rhs );
       return *this;
     }

@@ -25519,27 +30924,25 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkImageSubresource const&() const VULKAN_HPP_NOEXCEPT
+    operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkImageSubresource*>( this );
+      return *reinterpret_cast<const VkImageSubresource *>( this );
     }

     operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkImageSubresource*>( this );
+      return *reinterpret_cast<VkImageSubresource *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageSubresource const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageSubresource const & ) const = default;
 #else
-    bool operator==( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( aspectMask == rhs.aspectMask )
-          && ( mipLevel == rhs.mipLevel )
-          && ( arrayLayer == rhs.arrayLayer );
+      return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( arrayLayer == rhs.arrayLayer );
     }

-    bool operator!=( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -25547,37 +30950,31 @@ namespace VULKAN_HPP_NAMESPACE

   public:
     VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
-    uint32_t mipLevel = {};
-    uint32_t arrayLayer = {};
+    uint32_t mipLevel = {};
+    uint32_t arrayLayer = {};
   };
-  static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<ImageSubresource>::value, "struct wrapper is not a standard layout!" );

   struct Offset3D
   {
-    VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {},
-                                   int32_t y_ = {},
-                                   int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
       : x( x_ )
       , y( y_ )
       , z( z_ )
     {}

-    explicit Offset3D( Offset2D const& offset2D,
-                       int32_t z_ = {} )
-      : x( offset2D.x )
-      , y( offset2D.y )
-      , z( z_ )
-    {}
+    explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} ) : x( offset2D.x ), y( offset2D.y ), z( z_ ) {}

     Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
     {
       *this = rhs;
     }

-    Offset3D& operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
       return *this;
     }

@@ -25599,27 +30996,25 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkOffset3D const&() const VULKAN_HPP_NOEXCEPT
+    operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkOffset3D*>( this );
+      return *reinterpret_cast<const VkOffset3D *>( this );
     }

     operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkOffset3D*>( this );
+      return *reinterpret_cast<VkOffset3D *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Offset3D const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Offset3D const & ) const = default;
 #else
-    bool operator==( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( x == rhs.x )
-          && ( y == rhs.y )
-          && ( z == rhs.z );
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
     }

-    bool operator!=( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -25635,19 +31030,15 @@ namespace VULKAN_HPP_NAMESPACE

   struct Extent3D
   {
-    VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {},
-                                   uint32_t height_ = {},
-                                   uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR
+      Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
       : width( width_ )
       , height( height_ )
       , depth( depth_ )
     {}

-    explicit Extent3D( Extent2D const& extent2D,
-                       uint32_t depth_ = {} )
-      : width( extent2D.width )
-      , height( extent2D.height )
-      , depth( depth_ )
+    explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} )
+      : width( extent2D.width ), height( extent2D.height ), depth( depth_ )
     {}

     Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
@@ -25655,9 +31046,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    Extent3D& operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
       return *this;
     }

@@ -25679,48 +31070,47 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkExtent3D const&() const VULKAN_HPP_NOEXCEPT
+    operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkExtent3D*>( this );
+      return *reinterpret_cast<const VkExtent3D *>( this );
     }

     operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkExtent3D*>( this );
+      return *reinterpret_cast<VkExtent3D *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Extent3D const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Extent3D const & ) const = default;
 #else
-    bool operator==( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==(
Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( width == rhs.width ) - && ( height == rhs.height ) - && ( depth == rhs.depth ); + return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth ); } - bool operator!=( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t width = {}; + uint32_t width = {}; uint32_t height = {}; - uint32_t depth = {}; + uint32_t depth = {}; }; static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SparseImageMemoryBind { - VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, - VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT : subresource( subresource_ ) , offset( offset_ ) , extent( extent_ ) @@ -25734,13 +31124,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryBind & + setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT { subresource = subresource_; return *this; @@ -25776,51 +31167,49 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkSparseImageMemoryBind const&() const VULKAN_HPP_NOEXCEPT + operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseImageMemoryBind const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryBind const & ) const = default; #else - bool operator==( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( subresource == rhs.subresource ) - && ( offset == rhs.offset ) - && ( extent == rhs.extent ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ) - && ( flags == rhs.flags ); + return ( subresource == rhs.subresource ) && ( offset == rhs.offset ) && ( extent == rhs.extent ) && + ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags ); } - bool operator!=( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT + 
bool operator!=( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D offset = {}; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; - VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D offset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; }; - static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" ); + static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SparseImageMemoryBindInfo { - VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, - uint32_t bindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT : image( image_ ) , bindCount( bindCount_ ) , pBinds( pBinds_ ) @@ -25831,9 +31220,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -25849,58 +31238,60 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryBindInfo & + setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return *this; } - operator VkSparseImageMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseImageMemoryBindInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryBindInfo const & ) const = default; #else - bool operator==( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( image == rhs.image ) - && ( bindCount == rhs.bindCount ) - && ( pBinds == rhs.pBinds ); + return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); } - bool operator!=( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - 
VULKAN_HPP_NAMESPACE::Image image = {}; - uint32_t bindCount = {}; - const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds = {}; }; - static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct BindSparseInfo { - VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, - uint32_t bufferBindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ = {}, - uint32_t imageOpaqueBindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = {}, - uint32_t imageBindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ = {}, - uint32_t signalSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + BindSparseInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + uint32_t bufferBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ = {}, + uint32_t imageOpaqueBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {}, + uint32_t imageBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT : waitSemaphoreCount( waitSemaphoreCount_ ) , pWaitSemaphores( pWaitSemaphores_ ) , bufferBindCount( bufferBindCount_ ) @@ -25924,13 +31315,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -25942,7 +31333,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT { pWaitSemaphores = pWaitSemaphores_; return *this; @@ -25954,7 +31345,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ ) VULKAN_HPP_NOEXCEPT + BindSparseInfo & + setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT { pBufferBinds = pBufferBinds_; return *this; @@ -25966,7 +31358,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BindSparseInfo & setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ ) 
VULKAN_HPP_NOEXCEPT + BindSparseInfo & setPImageOpaqueBinds( + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT { pImageOpaqueBinds = pImageOpaqueBinds_; return *this; @@ -25978,7 +31371,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ ) VULKAN_HPP_NOEXCEPT + BindSparseInfo & + setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT { pImageBinds = pImageBinds_; return *this; @@ -25990,60 +31384,55 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + BindSparseInfo & + setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT { pSignalSemaphores = pSignalSemaphores_; return *this; } - operator VkBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindSparseInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindSparseInfo const & ) const = default; #else - bool operator==( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphores == rhs.pWaitSemaphores ) - && ( bufferBindCount == rhs.bufferBindCount ) - && ( pBufferBinds == rhs.pBufferBinds ) - && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) - && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) - && ( imageBindCount == rhs.imageBindCount ) - && ( pImageBinds == rhs.pImageBinds ) - && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) - && ( pSignalSemaphores == rhs.pSignalSemaphores ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( bufferBindCount == rhs.bufferBindCount ) && + ( pBufferBinds == rhs.pBufferBinds ) && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) && + ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) && ( imageBindCount == rhs.imageBindCount ) && + ( pImageBinds == rhs.pImageBinds ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphores == rhs.pSignalSemaphores ); } - bool operator!=( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo; - const void* pNext = {}; - uint32_t waitSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {}; - uint32_t bufferBindCount = {}; - const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds = {}; - uint32_t imageOpaqueBindCount = {}; - const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds = {}; - uint32_t imageBindCount = {}; - const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds = {}; - uint32_t signalSemaphoreCount 
= {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t bufferBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds = {}; + uint32_t imageOpaqueBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds = {}; + uint32_t imageBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; }; static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); @@ -26051,7 +31440,7 @@ namespace VULKAN_HPP_NAMESPACE struct BindVertexBufferIndirectCommandNV { VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, - uint32_t size_ = {}, + uint32_t size_ = {}, uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT : bufferAddress( bufferAddress_ ) , size( size_ ) @@ -26063,13 +31452,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BindVertexBufferIndirectCommandNV& operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + BindVertexBufferIndirectCommandNV & + setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return *this; @@ -26087,27 +31477,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkBindVertexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT + operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindVertexBufferIndirectCommandNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default; #else - bool operator==( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( bufferAddress == rhs.bufferAddress ) - && ( size == rhs.size ) - && ( stride == rhs.stride ); + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride ); } - bool operator!=( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -26115,17 +31503,19 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; - uint32_t size = {}; - uint32_t stride = {}; + uint32_t size = {}; + uint32_t stride = {}; }; - 
static_assert( sizeof( BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<BindVertexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindVertexBufferIndirectCommandNV>::value,
+                 "struct wrapper is not a standard layout!" );

   struct BufferCopy
   {
     VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
                                      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
-                                     VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
+                                     VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
       : srcOffset( srcOffset_ )
       , dstOffset( dstOffset_ )
       , size( size_ )
@@ -26136,9 +31526,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    BufferCopy& operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>( &rhs );
       return *this;
     }

@@ -26160,27 +31550,25 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkBufferCopy const&() const VULKAN_HPP_NOEXCEPT
+    operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkBufferCopy*>( this );
+      return *reinterpret_cast<const VkBufferCopy *>( this );
     }

     operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkBufferCopy*>( this );
+      return *reinterpret_cast<VkBufferCopy *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( BufferCopy const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BufferCopy const & ) const = default;
 #else
-    bool operator==( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( srcOffset == rhs.srcOffset )
-          && ( dstOffset == rhs.dstOffset )
-          && ( size == rhs.size );
+      return ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
     }

-    bool operator!=( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -26189,19 +31577,20 @@ namespace VULKAN_HPP_NAMESPACE
   public:
     VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
     VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
   };
   static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<BufferCopy>::value, "struct wrapper is not a standard layout!"
); struct BufferCreateInfo { - VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , size( size_ ) , usage( usage_ ) @@ -26221,13 +31610,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -26263,65 +31652,65 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BufferCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; } - operator VkBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCreateInfo const & ) const = default; #else - bool operator==( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( size == rhs.size ) - && ( usage == rhs.usage ) - && ( sharingMode == rhs.sharingMode ) - && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) - && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) && + ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) && + ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); } - bool operator!=( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::SharingMode sharingMode 
= VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; - uint32_t queueFamilyIndexCount = {}; - const uint32_t* pQueueFamilyIndices = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; }; - static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct BufferDeviceAddressCreateInfoEXT { - VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT : deviceAddress( deviceAddress_ ) {} BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( BufferDeviceAddressCreateInfoEXT ) - offsetof( BufferDeviceAddressCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( BufferDeviceAddressCreateInfoEXT ) - offsetof( BufferDeviceAddressCreateInfoEXT, pNext ) ); return *this; } @@ -26330,57 +31719,58 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BufferDeviceAddressCreateInfoEXT& operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferDeviceAddressCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + BufferDeviceAddressCreateInfoEXT & + setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } - operator VkBufferDeviceAddressCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferDeviceAddressCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default; #else - bool operator==( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceAddress == rhs.deviceAddress ); + return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ); } - bool operator!=( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; }; - static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct BufferDeviceAddressInfo { @@ -26399,13 +31789,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BufferDeviceAddressInfo& operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferDeviceAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -26417,45 +31807,44 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkBufferDeviceAddressInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferDeviceAddressInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferDeviceAddressInfo const & ) const = default; #else - bool operator==( BufferDeviceAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); } - bool operator!=( BufferDeviceAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; - static_assert( sizeof( BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), "struct and wrapper have different size!" 
); + static_assert( sizeof( BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct ImageSubresourceLayers { - VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - uint32_t mipLevel_ = {}, - uint32_t baseArrayLayer_ = {}, + VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t mipLevel_ = {}, + uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT : aspectMask( aspectMask_ ) , mipLevel( mipLevel_ ) @@ -26468,9 +31857,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT + ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -26498,49 +31887,48 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImageSubresourceLayers const&() const VULKAN_HPP_NOEXCEPT + operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageSubresourceLayers const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresourceLayers const & ) const = default; #else - bool operator==( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( aspectMask == rhs.aspectMask ) - && ( mipLevel == rhs.mipLevel ) - && ( baseArrayLayer == rhs.baseArrayLayer ) - && ( layerCount == rhs.layerCount ); + return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && + ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); } - bool operator!=( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - uint32_t mipLevel = {}; - uint32_t baseArrayLayer = {}; - uint32_t layerCount = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t mipLevel = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; }; - static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" ); + static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct BufferImageCopy { - VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, - uint32_t bufferRowLength_ = {}, - uint32_t bufferImageHeight_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT : bufferOffset( bufferOffset_ ) , bufferRowLength( bufferRowLength_ ) @@ -26555,9 +31943,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BufferImageCopy& operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -26579,7 +31967,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + BufferImageCopy & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; @@ -26597,55 +31986,52 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkBufferImageCopy const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferImageCopy const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferImageCopy const & ) const = default; #else - bool operator==( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( bufferOffset == rhs.bufferOffset ) - && ( bufferRowLength == rhs.bufferRowLength ) - && ( bufferImageHeight == rhs.bufferImageHeight ) - && ( imageSubresource == rhs.imageSubresource ) - && ( imageOffset == rhs.imageOffset ) - && ( imageExtent == rhs.imageExtent ); + return ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && + ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && + ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); } - bool operator!=( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; - uint32_t bufferRowLength = {}; - uint32_t bufferImageHeight = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; - VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + 
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; }; static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct BufferMemoryBarrier { - VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - uint32_t srcQueueFamilyIndex_ = {}, - uint32_t dstQueueFamilyIndex_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT : srcAccessMask( srcAccessMask_ ) , dstAccessMask( dstAccessMask_ ) , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) @@ -26666,13 +32052,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -26720,50 +32106,46 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkBufferMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferMemoryBarrier const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryBarrier const & ) const = default; #else - bool operator==( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) - && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) - && ( buffer == rhs.buffer ) - && ( offset == rhs.offset ) - && ( size == rhs.size ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && + ( offset == rhs.offset ) && ( size == rhs.size ); } - bool operator!=( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier; - const void* pNext = {}; - 
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - uint32_t srcQueueFamilyIndex = {}; - uint32_t dstQueueFamilyIndex = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; - static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" ); + static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct BufferMemoryRequirementsInfo2 @@ -26774,7 +32156,9 @@ namespace VULKAN_HPP_NAMESPACE BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( BufferMemoryRequirementsInfo2 ) - offsetof( BufferMemoryRequirementsInfo2, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( BufferMemoryRequirementsInfo2 ) - offsetof( BufferMemoryRequirementsInfo2, pNext ) ); return *this; } @@ -26783,13 +32167,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -26801,39 +32185,39 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkBufferMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferMemoryRequirementsInfo2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default; #else - bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); } - bool operator!=( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eBufferMemoryRequirementsInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; - static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct BufferOpaqueCaptureAddressCreateInfo { @@ -26841,9 +32225,13 @@ namespace VULKAN_HPP_NAMESPACE : opaqueCaptureAddress( opaqueCaptureAddress_ ) {} - BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BufferOpaqueCaptureAddressCreateInfo & + operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( BufferOpaqueCaptureAddressCreateInfo ) - offsetof( BufferOpaqueCaptureAddressCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( BufferOpaqueCaptureAddressCreateInfo ) - + offsetof( BufferOpaqueCaptureAddressCreateInfo, pNext ) ); return *this; } @@ -26852,13 +32240,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BufferOpaqueCaptureAddressCreateInfo& operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BufferOpaqueCaptureAddressCreateInfo & + operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferOpaqueCaptureAddressCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -26870,27 +32259,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkBufferOpaqueCaptureAddressCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default; #else - bool operator==( BufferOpaqueCaptureAddressCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); } - bool operator!=( BufferOpaqueCaptureAddressCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -26898,19 +32285,22 @@ namespace 
VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; - const void* pNext = {}; - uint64_t opaqueCaptureAddress = {}; + const void * pNext = {}; + uint64_t opaqueCaptureAddress = {}; }; - static_assert( sizeof( BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct BufferViewCreateInfo { - VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , buffer( buffer_ ) , format( format_ ) @@ -26929,13 +32319,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -26971,57 +32361,56 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkBufferViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferViewCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferViewCreateInfo const & ) const = default; #else - bool operator==( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( buffer == rhs.buffer ) - && ( format == rhs.format ) - && ( offset == rhs.offset ) - && ( range == rhs.range ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) && + ( format == rhs.format ) && ( offset == rhs.offset ) && ( range == rhs.range ); } - bool operator!=( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize range = {}; }; - static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct CalibratedTimestampInfoEXT { - VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = + VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT : timeDomain( timeDomain_ ) {} CalibratedTimestampInfoEXT & operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CalibratedTimestampInfoEXT ) - offsetof( CalibratedTimestampInfoEXT, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( CalibratedTimestampInfoEXT ) - offsetof( CalibratedTimestampInfoEXT, pNext ) ); return *this; } @@ -27030,13 +32419,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - CalibratedTimestampInfoEXT& operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + CalibratedTimestampInfoEXT & operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CalibratedTimestampInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CalibratedTimestampInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -27048,44 +32437,45 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkCalibratedTimestampInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkCalibratedTimestampInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CalibratedTimestampInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CalibratedTimestampInfoEXT const & ) const = default; #else - bool operator==( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( timeDomain == rhs.timeDomain ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomain == rhs.timeDomain ); } - bool operator!=( 
CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice; }; - static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct CheckpointDataNV { - VULKAN_HPP_CONSTEXPR CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, - void* pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CheckpointDataNV( + VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, + void * pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT : stage( stage_ ) , pCheckpointMarker( pCheckpointMarker_ ) {} @@ -27101,110 +32491,103 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - CheckpointDataNV& operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkCheckpointDataNV const&() const VULKAN_HPP_NOEXCEPT + operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CheckpointDataNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CheckpointDataNV const & ) const = default; #else - bool operator==( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stage == rhs.stage ) - && ( pCheckpointMarker == rhs.pCheckpointMarker ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && + ( pCheckpointMarker == rhs.pCheckpointMarker ); } - bool operator!=( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV; + void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe; - void* pCheckpointMarker = {}; + void * pCheckpointMarker = {}; }; - 
static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );

   union ClearColorValue
   {
-    ClearColorValue( VULKAN_HPP_NAMESPACE::ClearColorValue const& rhs ) VULKAN_HPP_NOEXCEPT
+    ClearColorValue( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
     }

-    ClearColorValue( const std::array<float,4>& float32_ = {} )
-      : float32( float32_ )
-    {}
+    ClearColorValue( const std::array<float, 4> & float32_ = {} ) : float32( float32_ ) {}

-    ClearColorValue( const std::array<int32_t,4>& int32_ )
-      : int32( int32_ )
-    {}
+    ClearColorValue( const std::array<int32_t, 4> & int32_ ) : int32( int32_ ) {}

-    ClearColorValue( const std::array<uint32_t,4>& uint32_ )
-      : uint32( uint32_ )
-    {}
+    ClearColorValue( const std::array<uint32_t, 4> & uint32_ ) : uint32( uint32_ ) {}

-    ClearColorValue & setFloat32( std::array<float,4> float32_ ) VULKAN_HPP_NOEXCEPT
+    ClearColorValue & setFloat32( std::array<float, 4> float32_ ) VULKAN_HPP_NOEXCEPT
     {
       float32 = float32_;
       return *this;
     }

-    ClearColorValue & setInt32( std::array<int32_t,4> int32_ ) VULKAN_HPP_NOEXCEPT
+    ClearColorValue & setInt32( std::array<int32_t, 4> int32_ ) VULKAN_HPP_NOEXCEPT
     {
       int32 = int32_;
       return *this;
     }

-    ClearColorValue & setUint32( std::array<uint32_t,4> uint32_ ) VULKAN_HPP_NOEXCEPT
+    ClearColorValue & setUint32( std::array<uint32_t, 4> uint32_ ) VULKAN_HPP_NOEXCEPT
     {
       uint32 = uint32_;
       return *this;
     }

-    VULKAN_HPP_NAMESPACE::ClearColorValue & operator=( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_NAMESPACE::ClearColorValue &
+      operator=( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
       return *this;
     }

-    operator VkClearColorValue const&() const
+    operator VkClearColorValue const &() const
     {
-      return *reinterpret_cast<const VkClearColorValue*>(this);
+      return *reinterpret_cast<const VkClearColorValue *>( this );
     }

     operator VkClearColorValue &()
     {
-      return *reinterpret_cast<VkClearColorValue*>(this);
+      return *reinterpret_cast<VkClearColorValue *>( this );
     }

-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> float32;
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, 4> int32;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4>    float32;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, 4>  int32;
     VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 4> uint32;
   };

   struct ClearDepthStencilValue
   {
-    VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {},
-                                                 uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT
       : depth( depth_ )
       , stencil( stencil_ )
     {}
@@ -27214,9 +32597,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>( &rhs );
       return *this;
     }
@@ -27232,52 +32615,48 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkClearDepthStencilValue const&() const VULKAN_HPP_NOEXCEPT
+    operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkClearDepthStencilValue*>( this );
+      return *reinterpret_cast<const VkClearDepthStencilValue *>( this );
     }

     operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkClearDepthStencilValue*>( this );
+      return *reinterpret_cast<VkClearDepthStencilValue *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ClearDepthStencilValue const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ClearDepthStencilValue const & ) const = default;
 #else
-    bool operator==( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( depth == rhs.depth )
-          && ( stencil == rhs.stencil );
+      return ( depth == rhs.depth ) && ( stencil == rhs.stencil );
     }

-    bool operator!=( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    float depth = {};
+    float    depth   = {};
     uint32_t stencil = {};
   };
-  static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );

   union ClearValue
   {
-    ClearValue( VULKAN_HPP_NAMESPACE::ClearValue const& rhs ) VULKAN_HPP_NOEXCEPT
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
     }

-    ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} )
-      : color( color_ )
-    {}
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) : color( color_ ) {}

-    ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ )
-      : depthStencil( depthStencil_ )
-    {}
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) : depthStencil( depthStencil_ ) {}

     ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
     {
@@ -27285,7 +32664,8 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
+    ClearValue &
+      setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
     {
       depthStencil = depthStencil_;
       return *this;
@@ -27293,34 +32673,34 @@ namespace VULKAN_HPP_NAMESPACE

     VULKAN_HPP_NAMESPACE::ClearValue & operator=( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
       return *this;
     }

-    operator VkClearValue const&() const
+    operator VkClearValue const &() const
     {
-      return *reinterpret_cast<const VkClearValue*>(this);
+      return *reinterpret_cast<const VkClearValue *>( this );
     }

     operator VkClearValue &()
     {
-      return *reinterpret_cast<VkClearValue*>(this);
+      return *reinterpret_cast<VkClearValue *>( this );
     }

 #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
-    VULKAN_HPP_NAMESPACE::ClearColorValue color;
+    VULKAN_HPP_NAMESPACE::ClearColorValue        color;
     VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
 #else
-    VkClearColorValue color;
+    VkClearColorValue        color;
     VkClearDepthStencilValue depthStencil;
-#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+#endif 
/*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; struct ClearAttachment { - ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - uint32_t colorAttachment_ = {}, - VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT + ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t colorAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT : aspectMask( aspectMask_ ) , colorAttachment( colorAttachment_ ) , clearValue( clearValue_ ) @@ -27331,9 +32711,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ClearAttachment& operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT + ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -27355,29 +32735,29 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkClearAttachment const&() const VULKAN_HPP_NOEXCEPT + operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - uint32_t colorAttachment = {}; - VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t colorAttachment = {}; + VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; }; static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct ClearRect { - VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, - uint32_t baseArrayLayer_ = {}, - uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT : rect( rect_ ) , baseArrayLayer( baseArrayLayer_ ) , layerCount( layerCount_ ) @@ -27388,9 +32768,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ClearRect& operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT + ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -27412,45 +32792,42 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkClearRect const&() const VULKAN_HPP_NOEXCEPT + operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkClearRect &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ClearRect const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClearRect const & ) const = default; #else - bool operator==( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( rect == rhs.rect ) - && ( baseArrayLayer == rhs.baseArrayLayer ) - && ( layerCount == rhs.layerCount ); + return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); } - bool operator!=( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ClearRect 
const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Rect2D rect = {}; - uint32_t baseArrayLayer = {}; - uint32_t layerCount = {}; + VULKAN_HPP_NAMESPACE::Rect2D rect = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; }; static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct CoarseSampleLocationNV { - VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, - uint32_t pixelY_ = {}, - uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT : pixelX( pixelX_ ) , pixelY( pixelY_ ) , sample( sample_ ) @@ -27461,9 +32838,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - CoarseSampleLocationNV& operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT + CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -27485,27 +32862,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkCoarseSampleLocationNV const&() const VULKAN_HPP_NOEXCEPT + operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CoarseSampleLocationNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CoarseSampleLocationNV const & ) const = default; #else - bool operator==( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( pixelX == rhs.pixelX ) - && ( pixelY == rhs.pixelY ) - && ( sample == rhs.sample ); + return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample ); } - bool operator!=( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -27516,15 +32891,18 @@ namespace VULKAN_HPP_NAMESPACE uint32_t pixelY = {}; uint32_t sample = {}; }; - static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" ); + static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct CoarseSampleOrderCustomNV { - VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, - uint32_t sampleCount_ = {}, - uint32_t sampleLocationCount_ = {}, - const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, + uint32_t sampleCount_ = {}, + uint32_t sampleLocationCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT : shadingRate( shadingRate_ ) , sampleCount( sampleCount_ ) , sampleLocationCount( sampleLocationCount_ ) @@ -27536,13 +32914,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - CoarseSampleOrderCustomNV& operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT + CoarseSampleOrderCustomNV & + setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT { shadingRate = shadingRate_; return *this; @@ -27560,53 +32939,56 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - CoarseSampleOrderCustomNV & setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + CoarseSampleOrderCustomNV & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT { pSampleLocations = pSampleLocations_; return *this; } - operator VkCoarseSampleOrderCustomNV const&() const VULKAN_HPP_NOEXCEPT + operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CoarseSampleOrderCustomNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default; #else - bool operator==( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( shadingRate == rhs.shadingRate ) - && ( sampleCount == rhs.sampleCount ) - && ( sampleLocationCount == rhs.sampleLocationCount ) - && ( pSampleLocations == rhs.pSampleLocations ); + return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) && + ( sampleLocationCount == rhs.sampleLocationCount ) && ( pSampleLocations == rhs.pSampleLocations ); } - bool operator!=( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations; - uint32_t sampleCount = {}; - uint32_t sampleLocationCount = {}; - const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* 
pSampleLocations = {}; + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations; + uint32_t sampleCount = {}; + uint32_t sampleLocationCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {}; }; - static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct CommandBufferAllocateInfo { - VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, - VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, - uint32_t commandBufferCount_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( + VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, + VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, + uint32_t commandBufferCount_ = {} ) VULKAN_HPP_NOEXCEPT : commandPool( commandPool_ ) , level( level_ ) , commandBufferCount( commandBufferCount_ ) @@ -27623,13 +33005,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CommandBufferAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -27653,52 +33035,52 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkCommandBufferAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandBufferAllocateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferAllocateInfo const & ) const = default; #else - bool operator==( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( commandPool == rhs.commandPool ) - && ( level == rhs.level ) - && ( commandBufferCount == rhs.commandBufferCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && + ( level == rhs.level ) && ( commandBufferCount == rhs.commandBufferCount ); } - bool operator!=( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo; - const void* pNext = {}; - 
VULKAN_HPP_NAMESPACE::CommandPool commandPool = {}; - VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary; - uint32_t commandBufferCount = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandPool commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary; + uint32_t commandBufferCount = {}; }; - static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct CommandBufferInheritanceInfo { - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t subpass_ = {}, - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, - VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, + VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT : renderPass( renderPass_ ) , subpass( subpass_ ) , framebuffer( framebuffer_ ) @@ -27709,7 +33091,8 @@ namespace VULKAN_HPP_NAMESPACE CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CommandBufferInheritanceInfo ) - offsetof( CommandBufferInheritanceInfo, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( CommandBufferInheritanceInfo ) - offsetof( CommandBufferInheritanceInfo, pNext ) ); return *this; } @@ -27718,13 +33101,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CommandBufferInheritanceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -27748,72 +33131,74 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceInfo & + setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT { occlusionQueryEnable = occlusionQueryEnable_; return *this; } - CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceInfo & + setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) 
VULKAN_HPP_NOEXCEPT { queryFlags = queryFlags_; return *this; } - CommandBufferInheritanceInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT { pipelineStatistics = pipelineStatistics_; return *this; } - operator VkCommandBufferInheritanceInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandBufferInheritanceInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceInfo const & ) const = default; #else - bool operator==( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( renderPass == rhs.renderPass ) - && ( subpass == rhs.subpass ) - && ( framebuffer == rhs.framebuffer ) - && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) - && ( queryFlags == rhs.queryFlags ) - && ( pipelineStatistics == rhs.pipelineStatistics ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && + ( subpass == rhs.subpass ) && ( framebuffer == rhs.framebuffer ) && + ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) && + ( pipelineStatistics == rhs.pipelineStatistics ); } - bool operator!=( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - uint32_t subpass = {}; - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {}; - VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {}; - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {}; + VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {}; + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; }; - static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct CommandBufferBeginInfo { - VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, - const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( + VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pInheritanceInfo( pInheritanceInfo_ ) {} @@ -27829,13 +33214,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -27847,195 +33232,221 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - CommandBufferBeginInfo & setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT + CommandBufferBeginInfo & setPInheritanceInfo( + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT { pInheritanceInfo = pInheritanceInfo_; return *this; } - operator VkCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandBufferBeginInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferBeginInfo const & ) const = default; #else - bool operator==( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pInheritanceInfo == rhs.pInheritanceInfo ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pInheritanceInfo == rhs.pInheritanceInfo ); } - bool operator!=( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {}; - const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {}; + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {}; }; - static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), + "struct and wrapper have different size!" 
); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct CommandBufferInheritanceConditionalRenderingInfoEXT { - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( + VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {} ) VULKAN_HPP_NOEXCEPT : conditionalRenderingEnable( conditionalRenderingEnable_ ) {} - CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceConditionalRenderingInfoEXT & + operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) - offsetof( CommandBufferInheritanceConditionalRenderingInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) - + offsetof( CommandBufferInheritanceConditionalRenderingInfoEXT, pNext ) ); return *this; } - CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceConditionalRenderingInfoEXT( + VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - CommandBufferInheritanceConditionalRenderingInfoEXT& operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceConditionalRenderingInfoEXT & + operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceConditionalRenderingInfoEXT & + setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT { conditionalRenderingEnable = conditionalRenderingEnable_; return *this; } - operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default; #else - bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( 
sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( conditionalRenderingEnable == rhs.conditionalRenderingEnable ); } - bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {}; }; - static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == + sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct CommandBufferInheritanceRenderPassTransformInfoQCOM { - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {} ) VULKAN_HPP_NOEXCEPT : transform( transform_ ) , renderArea( renderArea_ ) {} - CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceRenderPassTransformInfoQCOM & + operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) - offsetof( CommandBufferInheritanceRenderPassTransformInfoQCOM, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) - + offsetof( CommandBufferInheritanceRenderPassTransformInfoQCOM, pNext ) ); return *this; } - CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceRenderPassTransformInfoQCOM( + VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - CommandBufferInheritanceRenderPassTransformInfoQCOM& operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceRenderPassTransformInfoQCOM & + operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - CommandBufferInheritanceRenderPassTransformInfoQCOM 
& setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - CommandBufferInheritanceRenderPassTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceRenderPassTransformInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; } - CommandBufferInheritanceRenderPassTransformInfoQCOM & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceRenderPassTransformInfoQCOM & + setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT { renderArea = renderArea_; return *this; } - operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const&() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default; #else - bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( transform == rhs.transform ) - && ( renderArea == rhs.renderArea ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) && + ( renderArea == rhs.renderArea ); } - bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; }; - static_assert( sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) == sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) == + sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct CommandPoolCreateInfo { @@ -28056,13 +33467,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CommandPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -28080,47 +33491,45 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkCommandPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandPoolCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandPoolCreateInfo const & ) const = default; #else - bool operator==( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueFamilyIndex == rhs.queueFamilyIndex ); } - bool operator!=( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {}; - uint32_t queueFamilyIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; }; - static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);

  struct SpecializationMapEntry
  {
-    VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {},
-                                                 uint32_t offset_ = {},
-                                                 size_t size_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR
+      SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT
      : constantID( constantID_ )
      , offset( offset_ )
      , size( size_ )
    {}
@@ -28131,9 +33540,9 @@ namespace VULKAN_HPP_NAMESPACE
      *this = rhs;
    }

-    SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
      return *this;
    }
@@ -28155,27 +33564,25 @@ namespace VULKAN_HPP_NAMESPACE
      return *this;
    }

-    operator VkSpecializationMapEntry const&() const VULKAN_HPP_NOEXCEPT
+    operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
+      return *reinterpret_cast<const VkSpecializationMapEntry *>( this );
    }

     operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkSpecializationMapEntry*>( this );
+      return *reinterpret_cast<VkSpecializationMapEntry *>( this );
    }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SpecializationMapEntry const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SpecializationMapEntry const & ) const = default;
#else
-    bool operator==( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( constantID == rhs.constantID )
-          && ( offset == rhs.offset )
-          && ( size == rhs.size );
+      return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size );
    }

-    bool operator!=( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
@@ -28183,18 +33590,19 @@ namespace VULKAN_HPP_NAMESPACE

  public:
    uint32_t constantID = {};
-    uint32_t offset = {};
-    size_t size = {};
+    uint32_t offset     = {};
+    size_t   size       = {};
  };
-  static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
+  static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ),
+                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" 
); struct SpecializationInfo { - VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {}, - size_t dataSize_ = {}, - const void* pData_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {}, + size_t dataSize_ = {}, + const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT : mapEntryCount( mapEntryCount_ ) , pMapEntries( pMapEntries_ ) , dataSize( dataSize_ ) @@ -28206,9 +33614,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SpecializationInfo& operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -28218,7 +33626,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ ) VULKAN_HPP_NOEXCEPT + SpecializationInfo & + setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT { pMapEntries = pMapEntries_; return *this; @@ -28230,55 +33639,55 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SpecializationInfo & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT + SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT { pData = pData_; return *this; } - operator VkSpecializationInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SpecializationInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SpecializationInfo const & ) const = default; #else - bool operator==( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( mapEntryCount == rhs.mapEntryCount ) - && ( pMapEntries == rhs.pMapEntries ) - && ( dataSize == rhs.dataSize ) - && ( pData == rhs.pData ); + return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && + ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); } - bool operator!=( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t mapEntryCount = {}; - const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries = {}; - size_t dataSize = {}; - const void* pData = {}; + uint32_t mapEntryCount = {}; + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries = {}; + size_t dataSize = {}; + const void * pData = {}; }; - static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
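// [Editorial usage sketch - illustrative only, not part of the generated vulkan.hpp or of this patch.]
// SpecializationMapEntry and SpecializationInfo are typically filled together: each map entry describes where one
// specialization constant lives inside a caller-provided data blob. The constant IDs and the SpecData layout below
// are assumptions for the sketch; <array> and <cstddef> (for offsetof) are expected to be included.

struct SpecData
{
  uint32_t workGroupSize = 64;
  float    scale         = 1.5f;
} specData;

std::array<vk::SpecializationMapEntry, 2> mapEntries = {
  vk::SpecializationMapEntry( 0, static_cast<uint32_t>( offsetof( SpecData, workGroupSize ) ), sizeof( uint32_t ) ),
  vk::SpecializationMapEntry( 1, static_cast<uint32_t>( offsetof( SpecData, scale ) ), sizeof( float ) )
};

vk::SpecializationInfo specializationInfo( static_cast<uint32_t>( mapEntries.size() ),
                                           mapEntries.data(),
                                           sizeof( specData ),
                                           &specData );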
); struct PipelineShaderStageCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, - VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, - const char* pName_ = {}, - const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, + VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, + const char * pName_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , stage( stage_ ) , module( module_ ) @@ -28288,7 +33697,9 @@ namespace VULKAN_HPP_NAMESPACE PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineShaderStageCreateInfo ) - offsetof( PipelineShaderStageCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineShaderStageCreateInfo ) - offsetof( PipelineShaderStageCreateInfo, pNext ) ); return *this; } @@ -28297,19 +33708,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineShaderStageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -28327,66 +33739,64 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineShaderStageCreateInfo & setPName( const char* pName_ ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT { pName = pName_; return *this; } - PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageCreateInfo & setPSpecializationInfo( + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT { pSpecializationInfo = pSpecializationInfo_; return *this; } - operator VkPipelineShaderStageCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineShaderStageCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineShaderStageCreateInfo const & ) const = default; #else - bool operator==( PipelineShaderStageCreateInfo const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stage == rhs.stage ) - && ( module == rhs.module ) - && ( pName == rhs.pName ) - && ( pSpecializationInfo == rhs.pSpecializationInfo ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && + ( module == rhs.module ) && ( pName == rhs.pName ) && ( pSpecializationInfo == rhs.pSpecializationInfo ); } - bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; - VULKAN_HPP_NAMESPACE::ShaderModule module = {}; - const char* pName = {}; - const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; + VULKAN_HPP_NAMESPACE::ShaderModule module = {}; + const char * pName = {}; + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {}; }; - static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
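// [Editorial usage sketch - illustrative only, not part of the generated vulkan.hpp or of this patch.]
// A shader stage description as it would be filled for pipeline creation. "computeShaderModule" is an assumed,
// already created vk::ShaderModule, and "specializationInfo" is the (optional) structure from the sketch above;
// pSpecializationInfo may simply be left as nullptr when no specialization constants are used.

vk::PipelineShaderStageCreateInfo computeStage( {},                                  // flags
                                                vk::ShaderStageFlagBits::eCompute,   // stage
                                                computeShaderModule,                 // module
                                                "main",                              // pName: shader entry point
                                                &specializationInfo );               // pSpecializationInfo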
); struct ComputePipelineCreateInfo { - VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , stage( stage_ ) @@ -28406,13 +33816,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ComputePipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -28424,7 +33834,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT + ComputePipelineCreateInfo & + setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT { stage = stage_; return *this; @@ -28436,7 +33847,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + ComputePipelineCreateInfo & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; @@ -28448,53 +33860,52 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkComputePipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ComputePipelineCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComputePipelineCreateInfo const & ) const = default; #else - bool operator==( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stage == rhs.stage ) - && ( layout == rhs.layout ) - && ( basePipelineHandle == rhs.basePipelineHandle ) - && ( basePipelineIndex == rhs.basePipelineIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && + ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); } - bool operator!=( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs 
); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; }; - static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ConditionalRenderingBeginInfoEXT { - VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT : buffer( buffer_ ) , offset( offset_ ) , flags( flags_ ) @@ -28502,7 +33913,9 @@ namespace VULKAN_HPP_NAMESPACE ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ConditionalRenderingBeginInfoEXT ) - offsetof( ConditionalRenderingBeginInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ConditionalRenderingBeginInfoEXT ) - offsetof( ConditionalRenderingBeginInfoEXT, pNext ) ); return *this; } @@ -28511,13 +33924,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ConditionalRenderingBeginInfoEXT& operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ConditionalRenderingBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -28535,56 +33948,56 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + ConditionalRenderingBeginInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - operator VkConditionalRenderingBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkConditionalRenderingBeginInfoEXT &() 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ConditionalRenderingBeginInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default; #else - bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ) - && ( offset == rhs.offset ) - && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && + ( flags == rhs.flags ); } - bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {}; }; - static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
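// [Editorial usage sketch - illustrative only, not part of the generated vulkan.hpp or of this patch.]
// Assuming VK_EXT_conditional_rendering is enabled and its entry points are loaded, the structure above brackets a
// conditionally executed region of a command buffer; "predicateBuffer" is an assumed vk::Buffer holding a 32-bit
// predicate value at offset 0, and "commandBuffer" is an assumed vk::CommandBuffer in the recording state.

vk::ConditionalRenderingBeginInfoEXT conditionalBegin( predicateBuffer, 0, {} );   // buffer, offset, flags
commandBuffer.beginConditionalRenderingEXT( conditionalBegin );
// ... draws recorded here are discarded when the predicate value is zero ...
commandBuffer.endConditionalRenderingEXT();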
); struct ConformanceVersion { - VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, - uint8_t minor_ = {}, + VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, + uint8_t minor_ = {}, uint8_t subminor_ = {}, - uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT + uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT : major( major_ ) , minor( minor_ ) , subminor( subminor_ ) @@ -28596,9 +34009,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ConformanceVersion& operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT + ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -28626,52 +34039,51 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkConformanceVersion const&() const VULKAN_HPP_NOEXCEPT + operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ConformanceVersion const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConformanceVersion const & ) const = default; #else - bool operator==( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( major == rhs.major ) - && ( minor == rhs.minor ) - && ( subminor == rhs.subminor ) - && ( patch == rhs.patch ); + return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch ); } - bool operator!=( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint8_t major = {}; - uint8_t minor = {}; + uint8_t major = {}; + uint8_t minor = {}; uint8_t subminor = {}; - uint8_t patch = {}; + uint8_t patch = {}; }; - static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" ); + static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
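// [Editorial usage sketch - illustrative only, not part of the generated vulkan.hpp or of this patch.]
// ConformanceVersion is reported through VK_KHR_driver_properties / Vulkan 1.2. Assuming a Vulkan 1.1+ instance,
// <iostream> included, and an existing vk::PhysicalDevice named "physicalDevice", it can be read from a structure
// chain query like this.

auto propertyChain =
  physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceDriverProperties>();
vk::ConformanceVersion conformance = propertyChain.get<vk::PhysicalDeviceDriverProperties>().conformanceVersion;
std::cout << "conformance version: " << int( conformance.major ) << "." << int( conformance.minor ) << "."
          << int( conformance.subminor ) << "." << int( conformance.patch ) << "\n";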
); struct CooperativeMatrixPropertiesNV { - VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {}, - uint32_t NSize_ = {}, - uint32_t KSize_ = {}, - VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( + uint32_t MSize_ = {}, + uint32_t NSize_ = {}, + uint32_t KSize_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT : MSize( MSize_ ) , NSize( NSize_ ) , KSize( KSize_ ) @@ -28684,7 +34096,9 @@ namespace VULKAN_HPP_NAMESPACE CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CooperativeMatrixPropertiesNV ) - offsetof( CooperativeMatrixPropertiesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( CooperativeMatrixPropertiesNV ) - offsetof( CooperativeMatrixPropertiesNV, pNext ) ); return *this; } @@ -28693,13 +34107,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - CooperativeMatrixPropertiesNV& operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CooperativeMatrixPropertiesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + CooperativeMatrixPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -28753,34 +34167,27 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CooperativeMatrixPropertiesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default; #else - bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( MSize == rhs.MSize ) - && ( NSize == rhs.NSize ) - && ( KSize == rhs.KSize ) - && ( AType == rhs.AType ) - && ( BType == rhs.BType ) - && ( CType == rhs.CType ) - && ( DType == rhs.DType ) - && ( scope == rhs.scope ); + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && + ( KSize == rhs.KSize ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && + ( DType == rhs.DType ) && ( scope == rhs.scope ); } - bool operator!=( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -28788,25 +34195,29 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV; - void* pNext = {}; - uint32_t MSize = {}; - uint32_t NSize = {}; - uint32_t KSize = {}; - VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice; + void * pNext = {}; + uint32_t MSize = {}; + uint32_t NSize = {}; + uint32_t KSize = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; + VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice; }; - static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
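// [Editorial usage sketch - illustrative only, not part of the generated vulkan.hpp or of this patch.]
// With VK_NV_cooperative_matrix enabled, the supported matrix configurations can be enumerated and inspected;
// "physicalDevice" is an assumed vk::PhysicalDevice whose dispatcher has the extension entry points loaded.

std::vector<vk::CooperativeMatrixPropertiesNV> cooperativeMatrixProperties =
  physicalDevice.getCooperativeMatrixPropertiesNV();
for ( vk::CooperativeMatrixPropertiesNV const & p : cooperativeMatrixProperties )
{
  // pick a supported configuration by looking at p.MSize, p.NSize, p.KSize, the component types, and p.scope
}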
); #ifdef VK_ENABLE_BETA_EXTENSIONS struct CopyAccelerationStructureInfoKHR { - VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT : src( src_ ) , dst( dst_ ) , mode( mode_ ) @@ -28814,7 +34225,9 @@ namespace VULKAN_HPP_NAMESPACE CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CopyAccelerationStructureInfoKHR ) - offsetof( CopyAccelerationStructureInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( CopyAccelerationStructureInfoKHR ) - offsetof( CopyAccelerationStructureInfoKHR, pNext ) ); return *this; } @@ -28823,13 +34236,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - CopyAccelerationStructureInfoKHR& operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CopyAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -28847,132 +34260,148 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } - operator VkCopyAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CopyAccelerationStructureInfoKHR const& ) const = default; -#else - bool operator==( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default; +# else + bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( src == rhs.src ) - && ( dst == rhs.dst ) - && ( mode == rhs.mode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && + ( mode == rhs.mode ); } - bool operator!=( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; }; - static_assert( sizeof( CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS struct CopyAccelerationStructureToMemoryInfoKHR { - CopyAccelerationStructureToMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureToMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) + VULKAN_HPP_NOEXCEPT : src( src_ ) , dst( dst_ ) , mode( mode_ ) {} - CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureToMemoryInfoKHR & + operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CopyAccelerationStructureToMemoryInfoKHR ) - offsetof( CopyAccelerationStructureToMemoryInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( CopyAccelerationStructureToMemoryInfoKHR ) - + offsetof( CopyAccelerationStructureToMemoryInfoKHR, pNext ) ); return *this; } - CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - CopyAccelerationStructureToMemoryInfoKHR& operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureToMemoryInfoKHR & + operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - CopyAccelerationStructureToMemoryInfoKHR & setSrc( 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureToMemoryInfoKHR & + setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT { src = src_; return *this; } - CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureToMemoryInfoKHR & + setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT { dst = dst_; return *this; } - CopyAccelerationStructureToMemoryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureToMemoryInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } - operator VkCopyAccelerationStructureToMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; }; - static_assert( sizeof( CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CopyAccelerationStructureToMemoryInfoKHR ) == + sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
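// [Editorial usage sketch - illustrative only, not part of the generated vulkan.hpp or of this patch.]
// The two copy-info structures above belong to the provisional ray tracing extension (hence the
// VK_ENABLE_BETA_EXTENSIONS guard). A compaction copy would be described like this, with "source" and "compacted"
// being assumed vk::AccelerationStructureKHR handles; the structure is then handed to the corresponding
// acceleration-structure copy command of that extension.

vk::CopyAccelerationStructureInfoKHR copyInfo( source, compacted, vk::CopyAccelerationStructureModeKHR::eCompact );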
); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct CopyDescriptorSet { - VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, - uint32_t srcBinding_ = {}, - uint32_t srcArrayElement_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, - uint32_t dstBinding_ = {}, - uint32_t dstArrayElement_ = {}, + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, + uint32_t srcBinding_ = {}, + uint32_t srcArrayElement_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT : srcSet( srcSet_ ) , srcBinding( srcBinding_ ) @@ -28994,13 +34423,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CopyDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -29048,132 +34477,143 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkCopyDescriptorSet const&() const VULKAN_HPP_NOEXCEPT + operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CopyDescriptorSet const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyDescriptorSet const & ) const = default; #else - bool operator==( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcSet == rhs.srcSet ) - && ( srcBinding == rhs.srcBinding ) - && ( srcArrayElement == rhs.srcArrayElement ) - && ( dstSet == rhs.dstSet ) - && ( dstBinding == rhs.dstBinding ) - && ( dstArrayElement == rhs.dstArrayElement ) - && ( descriptorCount == rhs.descriptorCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && + ( srcBinding == rhs.srcBinding ) && ( srcArrayElement == rhs.srcArrayElement ) && + ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && + ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ); } - bool operator!=( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {}; - uint32_t srcBinding = {}; - uint32_t srcArrayElement = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; - uint32_t dstBinding = {}; - uint32_t dstArrayElement = {}; - uint32_t descriptorCount = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {}; + uint32_t srcBinding = {}; + uint32_t srcArrayElement = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t 
dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; }; - static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" ); + static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #ifdef VK_ENABLE_BETA_EXTENSIONS struct CopyMemoryToAccelerationStructureInfoKHR { - CopyMemoryToAccelerationStructureInfoKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT + CopyMemoryToAccelerationStructureInfoKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) + VULKAN_HPP_NOEXCEPT : src( src_ ) , dst( dst_ ) , mode( mode_ ) {} - CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + CopyMemoryToAccelerationStructureInfoKHR & + operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CopyMemoryToAccelerationStructureInfoKHR ) - offsetof( CopyMemoryToAccelerationStructureInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( CopyMemoryToAccelerationStructureInfoKHR ) - + offsetof( CopyMemoryToAccelerationStructureInfoKHR, pNext ) ); return *this; } - CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - CopyMemoryToAccelerationStructureInfoKHR& operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + CopyMemoryToAccelerationStructureInfoKHR & + operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - CopyMemoryToAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT + CopyMemoryToAccelerationStructureInfoKHR & + setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT { src = src_; return *this; } - CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + CopyMemoryToAccelerationStructureInfoKHR & + setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT { dst = dst_; return *this; } - CopyMemoryToAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + CopyMemoryToAccelerationStructureInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } - operator 
VkCopyMemoryToAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; }; - static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) == + sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_USE_PLATFORM_WIN32_KHR struct D3D12FenceSubmitInfoKHR { - VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {}, - const uint64_t* pWaitSemaphoreValues_ = {}, - uint32_t signalSemaphoreValuesCount_ = {}, - const uint64_t* pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValuesCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ) , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ) @@ -29191,13 +34631,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - D3D12FenceSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -29209,7 +34649,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { pWaitSemaphoreValues = pWaitSemaphoreValues_; return *this; @@ -29221,57 +34661,57 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* 
pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { pSignalSemaphoreValues = pSignalSemaphoreValues_; return *this; } - operator VkD3D12FenceSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( D3D12FenceSubmitInfoKHR const& ) const = default; -#else - bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default; +# else + bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) - && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) - && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) - && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) && + ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && + ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) && + ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); } - bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; - const void* pNext = {}; - uint32_t waitSemaphoreValuesCount = {}; - const uint64_t* pWaitSemaphoreValues = {}; - uint32_t signalSemaphoreValuesCount = {}; - const uint64_t* pSignalSemaphoreValues = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreValuesCount = {}; + const uint64_t * pWaitSemaphoreValues = {}; + uint32_t signalSemaphoreValuesCount = {}; + const uint64_t * pSignalSemaphoreValues = {}; }; - static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
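// [Editorial usage sketch - illustrative only, not part of the generated vulkan.hpp or of this patch.]
// On Windows, with VK_KHR_external_semaphore_win32 in use, the structure above is chained into a queue submission
// through pNext to provide D3D12 fence values; "queue", "fence" and the semaphores referenced by the submission are
// assumed to exist in the surrounding application code.

uint64_t waitValue   = 1;
uint64_t signalValue = 2;

vk::D3D12FenceSubmitInfoKHR d3d12FenceInfo( 1, &waitValue, 1, &signalValue );

vk::SubmitInfo submitInfo;                // wait/signal semaphores and command buffers filled in as usual
submitInfo.setPNext( &d3d12FenceInfo );   // chain the D3D12 fence values into the submission
queue.submit( submitInfo, fence );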
); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct DebugMarkerMarkerInfoEXT { - VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = {}, - std::array const& color_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {}, + std::array const & color_ = {} ) VULKAN_HPP_NOEXCEPT : pMarkerName( pMarkerName_ ) , color( color_ ) {} @@ -29287,71 +34727,71 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugMarkerMarkerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DebugMarkerMarkerInfoEXT & setPMarkerName( const char* pMarkerName_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT { pMarkerName = pMarkerName_; return *this; } - DebugMarkerMarkerInfoEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerMarkerInfoEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT { color = color_; return *this; } - operator VkDebugMarkerMarkerInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugMarkerMarkerInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugMarkerMarkerInfoEXT const & ) const = default; #else - bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pMarkerName == rhs.pMarkerName ) - && ( color == rhs.color ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pMarkerName == rhs.pMarkerName ) && + ( color == rhs.color ); } - bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; - const void* pNext = {}; - const char* pMarkerName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; + const void * pNext = {}; + const char * pMarkerName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; }; - static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
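// [Editorial usage sketch - illustrative only, not part of the generated vulkan.hpp or of this patch.]
// With VK_EXT_debug_marker enabled and its entry points loaded, the structure above labels a region of a command
// buffer for debugging tools; "commandBuffer" is an assumed vk::CommandBuffer in the recording state, and the name
// and color values are arbitrary.

vk::DebugMarkerMarkerInfoEXT marker( "shadow pass", { { 0.2f, 0.4f, 0.8f, 1.0f } } );   // marker name and color
commandBuffer.debugMarkerBeginEXT( marker );
// ... commands belonging to the marked region ...
commandBuffer.debugMarkerEndEXT();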
); struct DebugMarkerObjectNameInfoEXT { - VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, - uint64_t object_ = {}, - const char* pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = {}, + const char * pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT : objectType( objectType_ ) , object( object_ ) , pObjectName( pObjectName_ ) @@ -29359,7 +34799,8 @@ namespace VULKAN_HPP_NAMESPACE DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DebugMarkerObjectNameInfoEXT ) - offsetof( DebugMarkerObjectNameInfoEXT, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( DebugMarkerObjectNameInfoEXT ) - offsetof( DebugMarkerObjectNameInfoEXT, pNext ) ); return *this; } @@ -29368,19 +34809,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugMarkerObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectNameInfoEXT & + setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT { objectType = objectType_; return *this; @@ -29392,57 +34834,58 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugMarkerObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT { pObjectName = pObjectName_; return *this; } - operator VkDebugMarkerObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugMarkerObjectNameInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugMarkerObjectNameInfoEXT const & ) const = default; #else - bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectType == rhs.objectType ) - && ( object == rhs.object ) - && ( pObjectName == rhs.pObjectName ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( object == rhs.object ) && ( pObjectName == rhs.pObjectName ); } - bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - uint64_t object = {}; - const char* pObjectName = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + uint64_t object = {}; + const char * pObjectName = {}; }; - static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DebugMarkerObjectTagInfoEXT { - VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, - uint64_t object_ = {}, - uint64_t tagName_ = {}, - size_t tagSize_ = {}, - const void* pTag_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {} ) VULKAN_HPP_NOEXCEPT : objectType( objectType_ ) , object( object_ ) , tagName( tagName_ ) @@ -29452,7 +34895,8 @@ namespace VULKAN_HPP_NAMESPACE DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DebugMarkerObjectTagInfoEXT ) - offsetof( DebugMarkerObjectTagInfoEXT, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( DebugMarkerObjectTagInfoEXT ) - offsetof( DebugMarkerObjectTagInfoEXT, pNext ) ); return *this; } @@ -29461,19 +34905,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugMarkerObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectTagInfoEXT & + setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT { objectType = objectType_; return *this; @@ -29497,59 +34942,58 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugMarkerObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT { pTag = pTag_; return *this; } - operator VkDebugMarkerObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDebugMarkerObjectTagInfoEXT const &() const 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugMarkerObjectTagInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default; #else - bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectType == rhs.objectType ) - && ( object == rhs.object ) - && ( tagName == rhs.tagName ) - && ( tagSize == rhs.tagSize ) - && ( pTag == rhs.pTag ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( object == rhs.object ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && + ( pTag == rhs.pTag ); } - bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - uint64_t object = {}; - uint64_t tagName = {}; - size_t tagSize = {}; - const void* pTag = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + uint64_t object = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; }; - static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DebugReportCallbackCreateInfoEXT { - VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, - PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, - void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, + PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, + void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pfnCallback( pfnCallback_ ) , pUserData( pUserData_ ) @@ -29557,7 +35001,9 @@ namespace VULKAN_HPP_NAMESPACE DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DebugReportCallbackCreateInfoEXT ) - offsetof( DebugReportCallbackCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DebugReportCallbackCreateInfoEXT ) - offsetof( DebugReportCallbackCreateInfoEXT, pNext ) ); return *this; } @@ -29566,13 +35012,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugReportCallbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -29590,54 +35036,53 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugReportCallbackCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT + DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } - operator VkDebugReportCallbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugReportCallbackCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugReportCallbackCreateInfoEXT const & ) const = default; #else - bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pfnCallback == rhs.pfnCallback ) - && ( pUserData == rhs.pUserData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData ); } - bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {}; - PFN_vkDebugReportCallbackEXT pfnCallback = {}; - void* pUserData = {}; + const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {}; + PFN_vkDebugReportCallbackEXT pfnCallback = {}; + void * pUserData = {}; }; - static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DebugUtilsLabelEXT { - VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char* pLabelName_ = {}, - std::array const& color_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char * pLabelName_ = {}, + std::array const & color_ = {} ) VULKAN_HPP_NOEXCEPT : pLabelName( pLabelName_ ) , color( color_ ) {} @@ -29653,71 +35098,71 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugUtilsLabelEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DebugUtilsLabelEXT & setPLabelName( const char* pLabelName_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT { pLabelName = pLabelName_; return *this; } - DebugUtilsLabelEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsLabelEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT { color = color_; return *this; } - operator VkDebugUtilsLabelEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugUtilsLabelEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsLabelEXT const & ) const = default; #else - bool operator==( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pLabelName == rhs.pLabelName ) - && ( color == rhs.color ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pLabelName == rhs.pLabelName ) && + ( color == rhs.color ); } - bool operator!=( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT; - const void* pNext = {}; - const char* pLabelName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT; + const void * pNext = {}; + const char * pLabelName = {}; + 
VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; }; - static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct DebugUtilsObjectNameInfoEXT { - VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, - uint64_t objectHandle_ = {}, - const char* pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + const char * pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT : objectType( objectType_ ) , objectHandle( objectHandle_ ) , pObjectName( pObjectName_ ) @@ -29725,7 +35170,8 @@ namespace VULKAN_HPP_NAMESPACE DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsObjectNameInfoEXT ) - offsetof( DebugUtilsObjectNameInfoEXT, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( DebugUtilsObjectNameInfoEXT ) - offsetof( DebugUtilsObjectNameInfoEXT, pNext ) ); return *this; } @@ -29734,13 +35180,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugUtilsObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -29758,62 +35204,62 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugUtilsObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT { pObjectName = pObjectName_; return *this; } - operator VkDebugUtilsObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugUtilsObjectNameInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsObjectNameInfoEXT const & ) const = default; #else - bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectType == rhs.objectType ) - && ( objectHandle == rhs.objectHandle ) - && ( pObjectName == rhs.pObjectName ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( objectHandle == rhs.objectHandle ) && ( pObjectName == rhs.pObjectName ); } - bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( 
DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; - uint64_t objectHandle = {}; - const char* pObjectName = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + const char * pObjectName = {}; }; - static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DebugUtilsMessengerCallbackDataEXT { - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, - const char* pMessageIdName_ = {}, - int32_t messageIdNumber_ = {}, - const char* pMessage_ = {}, - uint32_t queueLabelCount_ = {}, - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = {}, - uint32_t cmdBufLabelCount_ = {}, - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = {}, - uint32_t objectCount_ = {}, - const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, + const char * pMessageIdName_ = {}, + int32_t messageIdNumber_ = {}, + const char * pMessage_ = {}, + uint32_t queueLabelCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {}, + uint32_t cmdBufLabelCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {}, + uint32_t objectCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pMessageIdName( pMessageIdName_ ) , messageIdNumber( messageIdNumber_ ) @@ -29828,7 +35274,9 @@ namespace VULKAN_HPP_NAMESPACE DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsMessengerCallbackDataEXT ) - offsetof( DebugUtilsMessengerCallbackDataEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DebugUtilsMessengerCallbackDataEXT ) - offsetof( DebugUtilsMessengerCallbackDataEXT, pNext ) ); return *this; } @@ -29837,25 +35285,27 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & + operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugUtilsMessengerCallbackDataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - 
DebugUtilsMessengerCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & + setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char* pMessageIdName_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT { pMessageIdName = pMessageIdName_; return *this; @@ -29867,7 +35317,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugUtilsMessengerCallbackDataEXT & setPMessage( const char* pMessage_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT { pMessage = pMessage_; return *this; @@ -29879,7 +35329,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & + setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT { pQueueLabels = pQueueLabels_; return *this; @@ -29891,7 +35342,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & + setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT { pCmdBufLabels = pCmdBufLabels_; return *this; @@ -29903,42 +35355,37 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugUtilsMessengerCallbackDataEXT & setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & + setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT { pObjects = pObjects_; return *this; } - operator VkDebugUtilsMessengerCallbackDataEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugUtilsMessengerCallbackDataEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsMessengerCallbackDataEXT const & ) const = default; #else - bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pMessageIdName == rhs.pMessageIdName ) - && ( messageIdNumber == rhs.messageIdNumber ) - && ( pMessage == rhs.pMessage ) - && ( queueLabelCount == rhs.queueLabelCount ) - && ( pQueueLabels == rhs.pQueueLabels ) - && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) - && ( pCmdBufLabels == rhs.pCmdBufLabels ) - && ( objectCount == rhs.objectCount ) - && ( pObjects == rhs.pObjects ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pMessageIdName == rhs.pMessageIdName ) && ( 
messageIdNumber == rhs.messageIdNumber ) && + ( pMessage == rhs.pMessage ) && ( queueLabelCount == rhs.queueLabelCount ) && + ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) && + ( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) && + ( pObjects == rhs.pObjects ); } - bool operator!=( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -29946,28 +35393,31 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {}; - const char* pMessageIdName = {}; - int32_t messageIdNumber = {}; - const char* pMessage = {}; - uint32_t queueLabelCount = {}; - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels = {}; - uint32_t cmdBufLabelCount = {}; - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels = {}; - uint32_t objectCount = {}; - const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {}; + const char * pMessageIdName = {}; + int32_t messageIdNumber = {}; + const char * pMessage = {}; + uint32_t queueLabelCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels = {}; + uint32_t cmdBufLabelCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels = {}; + uint32_t objectCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {}; }; - static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DebugUtilsMessengerCreateInfoEXT { - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, - void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , messageSeverity( messageSeverity_ ) , messageType( messageType_ ) @@ -29977,7 +35427,9 @@ namespace VULKAN_HPP_NAMESPACE DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsMessengerCreateInfoEXT ) - offsetof( DebugUtilsMessengerCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DebugUtilsMessengerCreateInfoEXT ) - offsetof( DebugUtilsMessengerCreateInfoEXT, pNext ) ); return *this; } @@ -29986,97 +35438,100 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugUtilsMessengerCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & + setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT { messageSeverity = messageSeverity_; return *this; } - DebugUtilsMessengerCreateInfoEXT & setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & + setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT { messageType = messageType_; return *this; } - DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & + setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT { pfnUserCallback = pfnUserCallback_; return *this; } - DebugUtilsMessengerCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } - operator VkDebugUtilsMessengerCreateInfoEXT const&() 
const VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugUtilsMessengerCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsMessengerCreateInfoEXT const & ) const = default; #else - bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( messageSeverity == rhs.messageSeverity ) - && ( messageType == rhs.messageType ) - && ( pfnUserCallback == rhs.pfnUserCallback ) - && ( pUserData == rhs.pUserData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( messageSeverity == rhs.messageSeverity ) && ( messageType == rhs.messageType ) && + ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData ); } - bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {}; VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {}; - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {}; - void* pUserData = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {}; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; }; - static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DebugUtilsObjectTagInfoEXT { - VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, - uint64_t objectHandle_ = {}, - uint64_t tagName_ = {}, - size_t tagSize_ = {}, - const void* pTag_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {} ) VULKAN_HPP_NOEXCEPT : objectType( objectType_ ) , objectHandle( objectHandle_ ) , tagName( tagName_ ) @@ -30086,7 +35541,8 @@ namespace VULKAN_HPP_NAMESPACE DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsObjectTagInfoEXT ) - offsetof( DebugUtilsObjectTagInfoEXT, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( DebugUtilsObjectTagInfoEXT ) - offsetof( DebugUtilsObjectTagInfoEXT, pNext ) ); return *this; } @@ -30095,13 +35551,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugUtilsObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -30131,63 +35587,65 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugUtilsObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT { pTag = pTag_; return *this; } - operator VkDebugUtilsObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugUtilsObjectTagInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default; #else - bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectType == rhs.objectType ) - && ( objectHandle == rhs.objectHandle ) - && ( tagName == rhs.tagName ) - && ( tagSize == rhs.tagSize ) - && ( pTag == rhs.pTag ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( objectHandle == rhs.objectHandle ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && + ( pTag == rhs.pTag ); } - bool operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ObjectType objectType = 
VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; - uint64_t objectHandle = {}; - uint64_t tagName = {}; - size_t tagSize = {}; - const void* pTag = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; }; - static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DedicatedAllocationBufferCreateInfoNV { - VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT - : dedicatedAllocation( dedicatedAllocation_ ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) + VULKAN_HPP_NOEXCEPT : dedicatedAllocation( dedicatedAllocation_ ) {} - DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationBufferCreateInfoNV & + operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DedicatedAllocationBufferCreateInfoNV ) - offsetof( DedicatedAllocationBufferCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DedicatedAllocationBufferCreateInfoNV ) - + offsetof( DedicatedAllocationBufferCreateInfoNV, pNext ) ); return *this; } @@ -30196,45 +35654,45 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationBufferCreateInfoNV & + operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DedicatedAllocationBufferCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationBufferCreateInfoNV & + setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT { dedicatedAllocation = dedicatedAllocation_; return *this; } - operator VkDedicatedAllocationBufferCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DedicatedAllocationBufferCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) 
const = default; #else - bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( dedicatedAllocation == rhs.dedicatedAllocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); } - bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -30242,21 +35700,28 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; }; - static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DedicatedAllocationImageCreateInfoNV { - VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT : dedicatedAllocation( dedicatedAllocation_ ) {} - DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationImageCreateInfoNV & + operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DedicatedAllocationImageCreateInfoNV ) - offsetof( DedicatedAllocationImageCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DedicatedAllocationImageCreateInfoNV ) - + offsetof( DedicatedAllocationImageCreateInfoNV, pNext ) ); return *this; } @@ -30265,45 +35730,45 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationImageCreateInfoNV & + operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DedicatedAllocationImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationImageCreateInfoNV & + setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT { dedicatedAllocation = dedicatedAllocation_; return *this; } - operator VkDedicatedAllocationImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator 
VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DedicatedAllocationImageCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default; #else - bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( dedicatedAllocation == rhs.dedicatedAllocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); } - bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -30311,23 +35776,30 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; }; - static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DedicatedAllocationMemoryAllocateInfoNV { - VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT : image( image_ ) , buffer( buffer_ ) {} - DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationMemoryAllocateInfoNV & + operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) - offsetof( DedicatedAllocationMemoryAllocateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DedicatedAllocationMemoryAllocateInfoNV ) - + offsetof( DedicatedAllocationMemoryAllocateInfoNV, pNext ) ); return *this; } @@ -30336,13 +35808,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationMemoryAllocateInfoNV & + operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -30360,46 +35833,47 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default; #else - bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ) - && ( buffer == rhs.buffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); } - bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; - static_assert( 
sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == + sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #ifdef VK_ENABLE_BETA_EXTENSIONS struct DeferredOperationInfoKHR { - VULKAN_HPP_CONSTEXPR DeferredOperationInfoKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DeferredOperationInfoKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ = {} ) VULKAN_HPP_NOEXCEPT : operationHandle( operationHandle_ ) {} @@ -30414,64 +35888,64 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeferredOperationInfoKHR& operator=( VkDeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DeferredOperationInfoKHR & operator=( VkDeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeferredOperationInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeferredOperationInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeferredOperationInfoKHR & setOperationHandle( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ ) VULKAN_HPP_NOEXCEPT + DeferredOperationInfoKHR & + setOperationHandle( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ ) VULKAN_HPP_NOEXCEPT { operationHandle = operationHandle_; return *this; } - operator VkDeferredOperationInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDeferredOperationInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeferredOperationInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeferredOperationInfoKHR const& ) const = default; -#else - bool operator==( DeferredOperationInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeferredOperationInfoKHR const & ) const = default; +# else + bool operator==( DeferredOperationInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( operationHandle == rhs.operationHandle ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( operationHandle == rhs.operationHandle ); } - bool operator!=( DeferredOperationInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeferredOperationInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeferredOperationInfoKHR; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeferredOperationInfoKHR; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle = {}; }; - static_assert( sizeof( DeferredOperationInfoKHR ) == sizeof( VkDeferredOperationInfoKHR ), "struct and wrapper have different size!" 
 );
+  static_assert( sizeof( DeferredOperationInfoKHR ) == sizeof( VkDeferredOperationInfoKHR ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<DeferredOperationInfoKHR>::value, "struct wrapper is not a standard layout!" );
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
 
   struct DescriptorBufferInfo
   {
-    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer     buffer_ = {},
                                                VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
-                                               VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
+                                               VULKAN_HPP_NAMESPACE::DeviceSize range_  = {} ) VULKAN_HPP_NOEXCEPT
       : buffer( buffer_ )
       , offset( offset_ )
       , range( range_ )
@@ -30482,9 +35956,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }
 
-    DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
       return *this;
     }
 
@@ -30506,45 +35980,45 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    operator VkDescriptorBufferInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDescriptorBufferInfo*>( this );
+      return *reinterpret_cast<const VkDescriptorBufferInfo *>( this );
     }
 
     operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDescriptorBufferInfo*>( this );
+      return *reinterpret_cast<VkDescriptorBufferInfo *>( this );
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorBufferInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorBufferInfo const & ) const = default;
 #else
-    bool operator==( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( buffer == rhs.buffer )
-          && ( offset == rhs.offset )
-          && ( range == rhs.range );
+      return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range );
     }
 
-    bool operator!=( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
   public:
-    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::Buffer     buffer = {};
     VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize range  = {};
   };
-  static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
 
   struct DescriptorImageInfo
   {
-    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {},
-                                              VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
-                                              VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler     sampler_   = {},
+                                              VULKAN_HPP_NAMESPACE::ImageView   imageView_ = {},
+                                              VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ =
+                                                VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
       : sampler( sampler_ )
       , imageView( imageView_ )
      , imageLayout( imageLayout_ )
@@ -30555,9 +36029,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }
 
-    DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
       return *this;
     }
 
@@ -30579,44 +36053,44 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    operator VkDescriptorImageInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
+      return *reinterpret_cast<const VkDescriptorImageInfo *>( this );
     }
 
     operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDescriptorImageInfo*>( this );
+      return *reinterpret_cast<VkDescriptorImageInfo *>( this );
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorImageInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorImageInfo const & ) const = default;
 #else
-    bool operator==( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sampler == rhs.sampler )
-          && ( imageView == rhs.imageView )
-          && ( imageLayout == rhs.imageLayout );
+      return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout );
     }
 
-    bool operator!=( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
   public:
-    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
-    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::Sampler     sampler     = {};
+    VULKAN_HPP_NAMESPACE::ImageView   imageView   = {};
     VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
   };
-  static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ),
+                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
 
   struct DescriptorPoolSize
   {
-    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
-                                             uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR
+      DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+                          uint32_t                             descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
       : type( type_ )
      , descriptorCount( descriptorCount_ )
     {}
@@ -30626,9 +36100,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }
 
-    DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+    DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
      return *this;
     }
 
@@ -30644,44 +36118,45 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    operator VkDescriptorPoolSize const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDescriptorPoolSize*>( this );
+      return *reinterpret_cast<const VkDescriptorPoolSize *>( this );
    }
 
     operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDescriptorPoolSize*>( this );
+      return *reinterpret_cast<VkDescriptorPoolSize *>( this );
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DescriptorPoolSize const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorPoolSize const & ) const = default;
 #else
-    bool operator==( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( type == rhs.type )
-          && ( descriptorCount == rhs.descriptorCount );
+      return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount );
    }
 
-    bool operator!=( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
   public:
-    VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
-    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType type            = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    uint32_t                             descriptorCount = {};
   };
-  static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
+  static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<DescriptorPoolSize>::value, "struct wrapper is not a standard layout!"
); struct DescriptorPoolCreateInfo { - VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, - uint32_t maxSets_ = {}, - uint32_t poolSizeCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, + uint32_t maxSets_ = {}, + uint32_t poolSizeCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , maxSets( maxSets_ ) , poolSizeCount( poolSizeCount_ ) @@ -30699,13 +36174,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -30729,108 +36204,113 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ ) VULKAN_HPP_NOEXCEPT + DescriptorPoolCreateInfo & + setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT { pPoolSizes = pPoolSizes_; return *this; } - operator VkDescriptorPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorPoolCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolCreateInfo const & ) const = default; #else - bool operator==( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( maxSets == rhs.maxSets ) - && ( poolSizeCount == rhs.poolSizeCount ) - && ( pPoolSizes == rhs.pPoolSizes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) && + ( poolSizeCount == rhs.poolSizeCount ) && ( pPoolSizes == rhs.pPoolSizes ); } - bool operator!=( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {}; - uint32_t maxSets = {}; - uint32_t poolSizeCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {}; + uint32_t maxSets = {}; + uint32_t poolSizeCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = 
{}; }; - static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct DescriptorPoolInlineUniformBlockCreateInfoEXT { - VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = {} ) VULKAN_HPP_NOEXCEPT : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ ) {} - DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorPoolInlineUniformBlockCreateInfoEXT & + operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) - offsetof( DescriptorPoolInlineUniformBlockCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) - + offsetof( DescriptorPoolInlineUniformBlockCreateInfoEXT, pNext ) ); return *this; } - DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - DescriptorPoolInlineUniformBlockCreateInfoEXT& operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorPoolInlineUniformBlockCreateInfoEXT & + operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DescriptorPoolInlineUniformBlockCreateInfoEXT & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT + DescriptorPoolInlineUniformBlockCreateInfoEXT & + setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT { maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; return *this; } - operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const & ) const = default; #else - bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const 
& rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); } - bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -30838,17 +36318,21 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT; - const void* pNext = {}; - uint32_t maxInlineUniformBlockBindings = {}; + const void * pNext = {}; + uint32_t maxInlineUniformBlockBindings = {}; }; - static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == + sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DescriptorSetAllocateInfo { - VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, - uint32_t descriptorSetCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, + uint32_t descriptorSetCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {} ) VULKAN_HPP_NOEXCEPT : descriptorPool( descriptorPool_ ) , descriptorSetCount( descriptorSetCount_ ) , pSetLayouts( pSetLayouts_ ) @@ -30865,19 +36349,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorSetAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetAllocateInfo & + setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT { descriptorPool = descriptorPool_; return *this; @@ -30889,57 +36374,58 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetAllocateInfo & + setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT { pSetLayouts = pSetLayouts_; return *this; } - operator VkDescriptorSetAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator 
VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetAllocateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetAllocateInfo const & ) const = default; #else - bool operator==( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( descriptorPool == rhs.descriptorPool ) - && ( descriptorSetCount == rhs.descriptorSetCount ) - && ( pSetLayouts == rhs.pSetLayouts ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) && + ( descriptorSetCount == rhs.descriptorSetCount ) && ( pSetLayouts == rhs.pSetLayouts ); } - bool operator!=( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {}; - uint32_t descriptorSetCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {}; + uint32_t descriptorSetCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; }; - static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DescriptorSetLayoutBinding { - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - uint32_t descriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, - const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT : binding( binding_ ) , descriptorType( descriptorType_ ) , descriptorCount( descriptorCount_ ) @@ -30952,9 +36438,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -30964,7 +36450,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBinding & + setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return *this; @@ -30982,76 +36469,84 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBinding & + setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT { pImmutableSamplers = pImmutableSamplers_; return *this; } - operator VkDescriptorSetLayoutBinding const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetLayoutBinding const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutBinding const & ) const = default; #else - bool operator==( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( binding == rhs.binding ) - && ( descriptorType == rhs.descriptorType ) - && ( descriptorCount == rhs.descriptorCount ) - && ( stageFlags == rhs.stageFlags ) - && ( pImmutableSamplers == rhs.pImmutableSamplers ); + return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) && + ( descriptorCount == rhs.descriptorCount ) && ( stageFlags == rhs.stageFlags ) && + ( pImmutableSamplers == rhs.pImmutableSamplers ); } - bool operator!=( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t binding = {}; - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = 
VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - uint32_t descriptorCount = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; - const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {}; }; - static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DescriptorSetLayoutBindingFlagsCreateInfo { - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( + uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {} ) VULKAN_HPP_NOEXCEPT : bindingCount( bindingCount_ ) , pBindingFlags( pBindingFlags_ ) {} - DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBindingFlagsCreateInfo & + operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) - offsetof( DescriptorSetLayoutBindingFlagsCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) - + offsetof( DescriptorSetLayoutBindingFlagsCreateInfo, pNext ) ); return *this; } - DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - DescriptorSetLayoutBindingFlagsCreateInfo& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBindingFlagsCreateInfo & + operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -31063,53 +36558,56 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBindingFlagsCreateInfo & + setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT { pBindingFlags = pBindingFlags_; return *this; } - operator VkDescriptorSetLayoutBindingFlagsCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return 
*reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default; #else - bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( bindingCount == rhs.bindingCount ) - && ( pBindingFlags == rhs.pBindingFlags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) && + ( pBindingFlags == rhs.pBindingFlags ); } - bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; - const void* pNext = {}; - uint32_t bindingCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + const void * pNext = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {}; }; - static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) == + sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DescriptorSetLayoutCreateInfo { - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, - uint32_t bindingCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, + uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , bindingCount( bindingCount_ ) , pBindings( pBindings_ ) @@ -31117,7 +36615,9 @@ namespace VULKAN_HPP_NAMESPACE DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetLayoutCreateInfo ) - offsetof( DescriptorSetLayoutCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DescriptorSetLayoutCreateInfo ) - offsetof( DescriptorSetLayoutCreateInfo, pNext ) ); return *this; } @@ -31126,19 +36626,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorSetLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -31150,49 +36651,49 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorSetLayoutCreateInfo & setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutCreateInfo & + setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT { pBindings = pBindings_; return *this; } - operator VkDescriptorSetLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetLayoutCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default; #else - bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( bindingCount == rhs.bindingCount ) - && ( pBindings == rhs.pBindings ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( bindingCount == rhs.bindingCount ) && ( pBindings == rhs.pBindings ); } - bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool 
operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {}; - uint32_t bindingCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {}; }; - static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DescriptorSetLayoutSupport { @@ -31202,7 +36703,8 @@ namespace VULKAN_HPP_NAMESPACE DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetLayoutSupport ) - offsetof( DescriptorSetLayoutSupport, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( DescriptorSetLayoutSupport ) - offsetof( DescriptorSetLayoutSupport, pNext ) ); return *this; } @@ -31211,111 +36713,118 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DescriptorSetLayoutSupport& operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDescriptorSetLayoutSupport const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetLayoutSupport const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutSupport const & ) const = default; #else - bool operator==( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( supported == rhs.supported ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported ); } - bool operator!=( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 supported = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supported 
= {}; }; - static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DescriptorSetVariableDescriptorCountAllocateInfo { - VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, - const uint32_t* pDescriptorCounts_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, + const uint32_t * pDescriptorCounts_ = {} ) VULKAN_HPP_NOEXCEPT : descriptorSetCount( descriptorSetCount_ ) , pDescriptorCounts( pDescriptorCounts_ ) {} - DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountAllocateInfo & + operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) - offsetof( DescriptorSetVariableDescriptorCountAllocateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) - + offsetof( DescriptorSetVariableDescriptorCountAllocateInfo, pNext ) ); return *this; } - DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - DescriptorSetVariableDescriptorCountAllocateInfo& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountAllocateInfo & + operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountAllocateInfo & + setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT { descriptorSetCount = descriptorSetCount_; return *this; } - DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountAllocateInfo & + setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT { pDescriptorCounts = pDescriptorCounts_; return *this; } - operator VkDescriptorSetVariableDescriptorCountAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT { - 
return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default; #else - bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( descriptorSetCount == rhs.descriptorSetCount ) - && ( pDescriptorCounts == rhs.pDescriptorCounts ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) && + ( pDescriptorCounts == rhs.pDescriptorCounts ); } - bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -31323,57 +36832,67 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; - const void* pNext = {}; - uint32_t descriptorSetCount = {}; - const uint32_t* pDescriptorCounts = {}; + const void * pNext = {}; + uint32_t descriptorSetCount = {}; + const uint32_t * pDescriptorCounts = {}; }; - static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) == + sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DescriptorSetVariableDescriptorCountLayoutSupport { - VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT : maxVariableDescriptorCount( maxVariableDescriptorCount_ ) {} - DescriptorSetVariableDescriptorCountLayoutSupport & operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountLayoutSupport & + operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) - offsetof( DescriptorSetVariableDescriptorCountLayoutSupport, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) - + offsetof( DescriptorSetVariableDescriptorCountLayoutSupport, pNext ) ); return *this; } - DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - DescriptorSetVariableDescriptorCountLayoutSupport& operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountLayoutSupport & + operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - operator VkDescriptorSetVariableDescriptorCountLayoutSupport const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default; #else - bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); } - bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -31381,20 +36900,24 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; - void* pNext = {}; - uint32_t maxVariableDescriptorCount = {}; + void * pNext = {}; + uint32_t maxVariableDescriptorCount = {}; }; - static_assert( sizeof( 
DescriptorSetVariableDescriptorCountLayoutSupport ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) == + sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DescriptorUpdateTemplateEntry { - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = {}, - uint32_t dstArrayElement_ = {}, - uint32_t descriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - size_t offset_ = {}, - size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + size_t offset_ = {}, + size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT : dstBinding( dstBinding_ ) , dstArrayElement( dstArrayElement_ ) , descriptorCount( descriptorCount_ ) @@ -31408,9 +36931,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DescriptorUpdateTemplateEntry& operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -31432,7 +36955,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateEntry & + setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return *this; @@ -31450,56 +36974,57 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDescriptorUpdateTemplateEntry const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorUpdateTemplateEntry const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default; #else - bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( dstBinding == rhs.dstBinding ) - && ( dstArrayElement == rhs.dstArrayElement ) - && ( descriptorCount == rhs.descriptorCount ) - && ( descriptorType == rhs.descriptorType ) - && ( offset == rhs.offset ) - && ( stride == rhs.stride ); + return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && + ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && + ( offset == rhs.offset ) && ( stride == rhs.stride ); } - bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT + 
bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t dstBinding = {}; - uint32_t dstArrayElement = {}; - uint32_t descriptorCount = {}; - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - size_t offset = {}; - size_t stride = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + size_t offset = {}; + size_t stride = {}; }; - static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DescriptorUpdateTemplateCreateInfo { - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, - uint32_t descriptorUpdateEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, - VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - uint32_t set_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, + uint32_t descriptorUpdateEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ) , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ) @@ -31512,7 +37037,9 @@ namespace VULKAN_HPP_NAMESPACE DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorUpdateTemplateCreateInfo ) - offsetof( DescriptorUpdateTemplateCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DescriptorUpdateTemplateCreateInfo ) - offsetof( DescriptorUpdateTemplateCreateInfo, pNext ) ); return *this; } @@ -31521,55 +37048,63 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & + operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - 
DescriptorUpdateTemplateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DescriptorUpdateTemplateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & + setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT { descriptorUpdateEntryCount = descriptorUpdateEntryCount_; return *this; } - DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT { pDescriptorUpdateEntries = pDescriptorUpdateEntries_; return *this; } - DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & + setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT { templateType = templateType_; return *this; } - DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & + setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT { descriptorSetLayout = descriptorSetLayout_; return *this; } - DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } - DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & + setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT { pipelineLayout = pipelineLayout_; return *this; @@ -31581,34 +37116,29 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDescriptorUpdateTemplateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorUpdateTemplateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default; #else - bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const 
VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) - && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) - && ( templateType == rhs.templateType ) - && ( descriptorSetLayout == rhs.descriptorSetLayout ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( pipelineLayout == rhs.pipelineLayout ) - && ( set == rhs.set ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) && + ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) && ( templateType == rhs.templateType ) && + ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( pipelineLayout == rhs.pipelineLayout ) && ( set == rhs.set ); } - bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -31616,25 +37146,28 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {}; - uint32_t descriptorUpdateEntryCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries = {}; - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {}; + uint32_t descriptorUpdateEntryCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet; VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; - uint32_t set = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; + uint32_t set = {}; }; - static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DeviceQueueCreateInfo { - VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, - uint32_t queueFamilyIndex_ = {}, - uint32_t queueCount_ = {}, - const float* pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + uint32_t queueCount_ = {}, + const float * pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , queueFamilyIndex( queueFamilyIndex_ ) , queueCount( queueCount_ ) @@ -31652,13 +37185,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceQueueCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -31682,109 +37215,108 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceQueueCreateInfo & setPQueuePriorities( const float* pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT + DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT { pQueuePriorities = pQueuePriorities_; return *this; } - operator VkDeviceQueueCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceQueueCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueCreateInfo const & ) const = default; #else - bool operator==( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ) - && ( queueCount == rhs.queueCount ) - && ( pQueuePriorities == rhs.pQueuePriorities ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueCount == rhs.queueCount ) && + ( pQueuePriorities == rhs.pQueuePriorities ); } - bool operator!=( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; - uint32_t queueFamilyIndex = {}; - uint32_t queueCount = {}; - const float* pQueuePriorities = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueCount = {}; + const float * pQueuePriorities = {}; }; - static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" 
); + static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PhysicalDeviceFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, - 
VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT : robustBufferAccess( robustBufferAccess_ ) , fullDrawIndexUint32( fullDrawIndexUint32_ ) , imageCubeArray( 
imageCubeArray_ ) @@ -31847,19 +37379,21 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT { robustBufferAccess = robustBufferAccess_; return *this; } - PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT { fullDrawIndexUint32 = fullDrawIndexUint32_; return *this; @@ -31883,7 +37417,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT { tessellationShader = tessellationShader_; return *this; @@ -31913,7 +37448,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT { drawIndirectFirstInstance = drawIndirectFirstInstance_; return *this; @@ -31973,115 +37509,134 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT { textureCompressionETC2 = textureCompressionETC2_; return *this; } - PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT { textureCompressionASTC_LDR = textureCompressionASTC_LDR_; return *this; } - PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT { textureCompressionBC = textureCompressionBC_; return *this; } - PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT { occlusionQueryPrecise = occlusionQueryPrecise_; return *this; } - PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT { pipelineStatisticsQuery = pipelineStatisticsQuery_; return *this; } - PhysicalDeviceFeatures & 
setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT { vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; return *this; } - PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT { fragmentStoresAndAtomics = fragmentStoresAndAtomics_; return *this; } - PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT { shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; return *this; } - PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT { shaderImageGatherExtended = shaderImageGatherExtended_; return *this; } - PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; return *this; } - PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderStorageImageMultisample( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageMultisample = shaderStorageImageMultisample_; return *this; } - PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; return *this; } - PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; return *this; } - PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; return *this; } - PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( 
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; return *this; } - PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; return *this; } - PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; return *this; } - PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT { shaderClipDistance = shaderClipDistance_; return *this; } - PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT { shaderCullDistance = shaderCullDistance_; return *this; @@ -32105,13 +37660,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT { shaderResourceResidency = shaderResourceResidency_; return *this; } - PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT { shaderResourceMinLod = shaderResourceMinLod_; return *this; @@ -32123,55 +37680,64 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT { sparseResidencyBuffer = sparseResidencyBuffer_; return *this; } - PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT { sparseResidencyImage2D = sparseResidencyImage2D_; return *this; } - PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT { sparseResidencyImage3D = 
sparseResidencyImage3D_; return *this; } - PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT { sparseResidency2Samples = sparseResidency2Samples_; return *this; } - PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT { sparseResidency4Samples = sparseResidency4Samples_; return *this; } - PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT { sparseResidency8Samples = sparseResidency8Samples_; return *this; } - PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT { sparseResidency16Samples = sparseResidency16Samples_; return *this; } - PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT { sparseResidencyAliased = sparseResidencyAliased_; return *this; } - PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT { variableMultisampleRate = variableMultisampleRate_; return *this; @@ -32183,154 +37749,141 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPhysicalDeviceFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( robustBufferAccess == rhs.robustBufferAccess ) - && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) - && ( imageCubeArray == rhs.imageCubeArray ) - && ( independentBlend == rhs.independentBlend ) - && ( geometryShader == rhs.geometryShader ) - && ( tessellationShader == rhs.tessellationShader ) - && ( sampleRateShading == rhs.sampleRateShading ) - && ( dualSrcBlend == rhs.dualSrcBlend ) - && ( logicOp == rhs.logicOp ) - && ( multiDrawIndirect == rhs.multiDrawIndirect ) - && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) - && ( depthClamp == rhs.depthClamp ) - && ( depthBiasClamp == rhs.depthBiasClamp ) - && ( fillModeNonSolid == 
rhs.fillModeNonSolid ) - && ( depthBounds == rhs.depthBounds ) - && ( wideLines == rhs.wideLines ) - && ( largePoints == rhs.largePoints ) - && ( alphaToOne == rhs.alphaToOne ) - && ( multiViewport == rhs.multiViewport ) - && ( samplerAnisotropy == rhs.samplerAnisotropy ) - && ( textureCompressionETC2 == rhs.textureCompressionETC2 ) - && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) - && ( textureCompressionBC == rhs.textureCompressionBC ) - && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) - && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) - && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) - && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) - && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) - && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) - && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) - && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) - && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) - && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) - && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) - && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) - && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) - && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) - && ( shaderClipDistance == rhs.shaderClipDistance ) - && ( shaderCullDistance == rhs.shaderCullDistance ) - && ( shaderFloat64 == rhs.shaderFloat64 ) - && ( shaderInt64 == rhs.shaderInt64 ) - && ( shaderInt16 == rhs.shaderInt16 ) - && ( shaderResourceResidency == rhs.shaderResourceResidency ) - && ( shaderResourceMinLod == rhs.shaderResourceMinLod ) - && ( sparseBinding == rhs.sparseBinding ) - && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) - && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) - && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) - && ( sparseResidency2Samples == rhs.sparseResidency2Samples ) - && ( sparseResidency4Samples == rhs.sparseResidency4Samples ) - && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) - && ( sparseResidency16Samples == rhs.sparseResidency16Samples ) - && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) - && ( variableMultisampleRate == rhs.variableMultisampleRate ) - && ( inheritedQueries == rhs.inheritedQueries ); + return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) && + ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) && + ( geometryShader == rhs.geometryShader ) && ( tessellationShader == rhs.tessellationShader ) && + ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) && + ( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) && + ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) && ( depthClamp == rhs.depthClamp ) && + ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) && + ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) && + ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) && + ( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) && + ( 
textureCompressionETC2 == rhs.textureCompressionETC2 ) && + ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) && + ( textureCompressionBC == rhs.textureCompressionBC ) && + ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) && + ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) && + ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) && + ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) && + ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) && + ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) && + ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) && + ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) && + ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) && + ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) && + ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) && + ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) && + ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) && + ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) && + ( shaderClipDistance == rhs.shaderClipDistance ) && ( shaderCullDistance == rhs.shaderCullDistance ) && + ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) && + ( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) && + ( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) && + ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) && + ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) && + ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) && + ( sparseResidency2Samples == rhs.sparseResidency2Samples ) && + ( sparseResidency4Samples == rhs.sparseResidency4Samples ) && + ( sparseResidency8Samples == rhs.sparseResidency8Samples ) && + ( sparseResidency16Samples == rhs.sparseResidency16Samples ) && + ( sparseResidencyAliased == rhs.sparseResidencyAliased ) && + ( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries ); } - bool operator!=( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {}; - VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {}; - VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {}; - VULKAN_HPP_NAMESPACE::Bool32 logicOp = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {}; - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {}; - VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {}; - VULKAN_HPP_NAMESPACE::Bool32 wideLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 largePoints = {}; - VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {}; - 
VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {}; - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {}; - VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {}; + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 logicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {}; + VULKAN_HPP_NAMESPACE::Bool32 wideLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 largePoints = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {}; - VULKAN_HPP_NAMESPACE::Bool32 
shaderResourceMinLod = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {}; - VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {}; - VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {}; + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {}; }; - static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" ); + static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct DeviceCreateInfo { - VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, - uint32_t queueCreateInfoCount_ = {}, - const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {}, - uint32_t enabledLayerCount_ = {}, - const char* const* ppEnabledLayerNames_ = {}, - uint32_t enabledExtensionCount_ = {}, - const char* const* ppEnabledExtensionNames_ = {}, - const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, + uint32_t queueCreateInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {}, + uint32_t enabledLayerCount_ = {}, + const char * const * ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char * const * ppEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {} ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) , queueCreateInfoCount( queueCreateInfoCount_ ) , pQueueCreateInfos( pQueueCreateInfos_ ) @@ -32352,13 +37905,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -32376,7 +37929,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceCreateInfo & setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & + setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT { pQueueCreateInfos = pQueueCreateInfos_; return *this; @@ -32388,7 +37942,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT { ppEnabledLayerNames = ppEnabledLayerNames_; return *this; @@ -32400,75 +37954,75 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT { ppEnabledExtensionNames = ppEnabledExtensionNames_; return *this; } - DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & + setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT { pEnabledFeatures = pEnabledFeatures_; return *this; } - operator VkDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceCreateInfo const& ) const = default; +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceCreateInfo const & ) const = default; #else - bool operator==( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queueCreateInfoCount == rhs.queueCreateInfoCount ) - && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) - && ( enabledLayerCount == rhs.enabledLayerCount ) - && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) - && ( enabledExtensionCount == rhs.enabledExtensionCount ) - && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ) - && ( pEnabledFeatures == rhs.pEnabledFeatures ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueCreateInfoCount == rhs.queueCreateInfoCount ) && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) && + ( enabledLayerCount == rhs.enabledLayerCount ) && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) && + ( enabledExtensionCount == rhs.enabledExtensionCount ) && + ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ) && ( pEnabledFeatures == rhs.pEnabledFeatures ); } - bool operator!=( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {}; - uint32_t queueCreateInfoCount = {}; - const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos = {}; - uint32_t enabledLayerCount = {}; - const char* const* ppEnabledLayerNames = {}; - uint32_t enabledExtensionCount = {}; - const char* const* ppEnabledExtensionNames = {}; - const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {}; + uint32_t queueCreateInfoCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos = {}; + uint32_t enabledLayerCount = {}; + const char * const * ppEnabledLayerNames = {}; + uint32_t enabledExtensionCount = {}; + const char * const * ppEnabledExtensionNames = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {}; }; - static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct DeviceDiagnosticsConfigCreateInfoNV { - VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( + VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) {} - DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceDiagnosticsConfigCreateInfoNV & + operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceDiagnosticsConfigCreateInfoNV ) - offsetof( DeviceDiagnosticsConfigCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DeviceDiagnosticsConfigCreateInfoNV ) - offsetof( DeviceDiagnosticsConfigCreateInfoNV, pNext ) ); return *this; } @@ -32477,61 +38031,65 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceDiagnosticsConfigCreateInfoNV& operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceDiagnosticsConfigCreateInfoNV & + operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + DeviceDiagnosticsConfigCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - operator VkDeviceDiagnosticsConfigCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default; #else - bool operator==( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); } - bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {}; }; - static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV 
), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DeviceEventInfoEXT { - VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = + VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT : deviceEvent( deviceEvent_ ) {} @@ -32546,13 +38104,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -32564,44 +38122,43 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceEventInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceEventInfoEXT const & ) const = default; #else - bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceEvent == rhs.deviceEvent ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent ); } - bool operator!=( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug; }; - static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct DeviceGroupBindSparseInfo { VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, - uint32_t memoryDeviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT + uint32_t memoryDeviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT : resourceDeviceIndex( resourceDeviceIndex_ ) , memoryDeviceIndex( memoryDeviceIndex_ ) {} @@ -32617,13 +38174,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceGroupBindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -32641,41 +38198,41 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDeviceGroupBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupBindSparseInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default; #else - bool operator==( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) - && ( memoryDeviceIndex == rhs.memoryDeviceIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) && + ( memoryDeviceIndex == rhs.memoryDeviceIndex ); } - bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; - const void* pNext = {}; - uint32_t resourceDeviceIndex = {}; - uint32_t memoryDeviceIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; + const void * pNext = {}; + uint32_t resourceDeviceIndex = {}; + uint32_t memoryDeviceIndex = {}; }; - static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DeviceGroupCommandBufferBeginInfo { @@ -32685,7 +38242,9 @@ namespace VULKAN_HPP_NAMESPACE DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupCommandBufferBeginInfo ) - offsetof( DeviceGroupCommandBufferBeginInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DeviceGroupCommandBufferBeginInfo ) - offsetof( DeviceGroupCommandBufferBeginInfo, pNext ) ); return *this; } @@ -32694,13 +38253,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceGroupCommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -32712,51 +38271,53 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDeviceGroupCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupCommandBufferBeginInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default; #else - bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceMask == rhs.deviceMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ); } - bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; - const void* pNext = {}; - uint32_t deviceMask = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; + const void * pNext = {}; + uint32_t deviceMask = {}; }; - static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DeviceGroupDeviceCreateInfo { - VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {}, - const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( + uint32_t physicalDeviceCount_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {} ) VULKAN_HPP_NOEXCEPT : physicalDeviceCount( physicalDeviceCount_ ) , pPhysicalDevices( pPhysicalDevices_ ) {} DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupDeviceCreateInfo ) - offsetof( DeviceGroupDeviceCreateInfo, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( DeviceGroupDeviceCreateInfo ) - offsetof( DeviceGroupDeviceCreateInfo, pNext ) ); return *this; } @@ -32765,13 +38326,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceGroupDeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -32783,59 +38344,63 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupDeviceCreateInfo & + setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT { pPhysicalDevices = pPhysicalDevices_; return *this; } - operator VkDeviceGroupDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupDeviceCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default; #else - bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( physicalDeviceCount == rhs.physicalDeviceCount ) - && ( pPhysicalDevices == rhs.pPhysicalDevices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && + ( pPhysicalDevices == rhs.pPhysicalDevices ); } - bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; - const void* pNext = {}; - uint32_t physicalDeviceCount = {}; - const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; + const void * pNext = {}; + uint32_t 
physicalDeviceCount = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {}; }; - static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DeviceGroupPresentCapabilitiesKHR { - VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array const& presentMask_ = {}, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( + std::array const & presentMask_ = {}, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT : presentMask( presentMask_ ) , modes( modes_ ) {} DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupPresentCapabilitiesKHR ) - offsetof( DeviceGroupPresentCapabilitiesKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DeviceGroupPresentCapabilitiesKHR ) - offsetof( DeviceGroupPresentCapabilitiesKHR, pNext ) ); return *this; } @@ -32844,34 +38409,32 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceGroupPresentCapabilitiesKHR& operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDeviceGroupPresentCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupPresentCapabilitiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default; #else - bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( presentMask == rhs.presentMask ) - && ( modes == rhs.modes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) && + ( modes == rhs.modes ); } - bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -32879,18 +38442,22 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR; - const void* pNext = {}; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D presentMask = {}; - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; + 
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; }; - static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DeviceGroupPresentInfoKHR { - VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {}, - const uint32_t* pDeviceMasks_ = {}, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {}, + const uint32_t * pDeviceMasks_ = {}, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT : swapchainCount( swapchainCount_ ) , pDeviceMasks( pDeviceMasks_ ) , mode( mode_ ) @@ -32907,13 +38474,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceGroupPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -32925,61 +38492,63 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t* pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT { pDeviceMasks = pDeviceMasks_; return *this; } - DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupPresentInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } - operator VkDeviceGroupPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupPresentInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default; #else - bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchainCount == rhs.swapchainCount ) - && ( pDeviceMasks == rhs.pDeviceMasks ) - && ( mode == rhs.mode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && + ( pDeviceMasks == rhs.pDeviceMasks ) && ( mode == rhs.mode ); } - bool operator!=( 
DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; - const void* pNext = {}; - uint32_t swapchainCount = {}; - const uint32_t* pDeviceMasks = {}; - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const uint32_t * pDeviceMasks = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal; }; - static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DeviceGroupRenderPassBeginInfo { - VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {}, - uint32_t deviceRenderAreaCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {}, + uint32_t deviceRenderAreaCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {} ) + VULKAN_HPP_NOEXCEPT : deviceMask( deviceMask_ ) , deviceRenderAreaCount( deviceRenderAreaCount_ ) , pDeviceRenderAreas( pDeviceRenderAreas_ ) @@ -32987,7 +38556,9 @@ namespace VULKAN_HPP_NAMESPACE DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupRenderPassBeginInfo ) - offsetof( DeviceGroupRenderPassBeginInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DeviceGroupRenderPassBeginInfo ) - offsetof( DeviceGroupRenderPassBeginInfo, pNext ) ); return *this; } @@ -32996,13 +38567,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceGroupRenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -33020,58 +38591,59 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupRenderPassBeginInfo & + setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT { pDeviceRenderAreas = pDeviceRenderAreas_; return *this; } - operator VkDeviceGroupRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return 
*reinterpret_cast( this ); } operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupRenderPassBeginInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default; #else - bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceMask == rhs.deviceMask ) - && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) - && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) && + ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); } - bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; - const void* pNext = {}; - uint32_t deviceMask = {}; - uint32_t deviceRenderAreaCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; + const void * pNext = {}; + uint32_t deviceMask = {}; + uint32_t deviceRenderAreaCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {}; }; - static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DeviceGroupSubmitInfo { - VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {}, - const uint32_t* pWaitSemaphoreDeviceIndices_ = {}, - uint32_t commandBufferCount_ = {}, - const uint32_t* pCommandBufferDeviceMasks_ = {}, - uint32_t signalSemaphoreCount_ = {}, - const uint32_t* pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const uint32_t * pWaitSemaphoreDeviceIndices_ = {}, + uint32_t commandBufferCount_ = {}, + const uint32_t * pCommandBufferDeviceMasks_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const uint32_t * pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT : waitSemaphoreCount( waitSemaphoreCount_ ) , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ) , commandBufferCount( commandBufferCount_ ) @@ -33091,13 +38663,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceGroupSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -33109,7 +38681,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupSubmitInfo & + setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; return *this; @@ -33121,7 +38694,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupSubmitInfo & + setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT { pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; return *this; @@ -33133,65 +38707,67 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupSubmitInfo & + setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; return *this; } - operator VkDeviceGroupSubmitInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupSubmitInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupSubmitInfo const & ) const = default; #else - bool operator==( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) - && ( 
commandBufferCount == rhs.commandBufferCount ) - && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) - && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) - && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) && + ( commandBufferCount == rhs.commandBufferCount ) && + ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) && + ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); } - bool operator!=( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo; - const void* pNext = {}; - uint32_t waitSemaphoreCount = {}; - const uint32_t* pWaitSemaphoreDeviceIndices = {}; - uint32_t commandBufferCount = {}; - const uint32_t* pCommandBufferDeviceMasks = {}; - uint32_t signalSemaphoreCount = {}; - const uint32_t* pSignalSemaphoreDeviceIndices = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const uint32_t * pWaitSemaphoreDeviceIndices = {}; + uint32_t commandBufferCount = {}; + const uint32_t * pCommandBufferDeviceMasks = {}; + uint32_t signalSemaphoreCount = {}; + const uint32_t * pSignalSemaphoreDeviceIndices = {}; }; - static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct DeviceGroupSwapchainCreateInfoKHR { - VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT - : modes( modes_ ) + VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT : modes( modes_ ) {} DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupSwapchainCreateInfoKHR ) - offsetof( DeviceGroupSwapchainCreateInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DeviceGroupSwapchainCreateInfoKHR ) - offsetof( DeviceGroupSwapchainCreateInfoKHR, pNext ) ); return *this; } @@ -33200,67 +38776,73 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceGroupSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupSwapchainCreateInfoKHR & + setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT { modes = modes_; return *this; } - operator VkDeviceGroupSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default; #else - bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( modes == rhs.modes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes ); } - bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; }; - static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DeviceMemoryOpaqueCaptureAddressInfo { - VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT : memory( memory_ ) {} - DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceMemoryOpaqueCaptureAddressInfo & + operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) - offsetof( DeviceMemoryOpaqueCaptureAddressInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) - + offsetof( DeviceMemoryOpaqueCaptureAddressInfo, pNext ) ); return *this; } @@ -33269,13 +38851,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceMemoryOpaqueCaptureAddressInfo& operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceMemoryOpaqueCaptureAddressInfo & + operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -33287,49 +38870,55 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDeviceMemoryOpaqueCaptureAddressInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default; #else - bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); } - bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; }; - static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == 
sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DeviceMemoryOverallocationCreateInfoAMD { - VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT : overallocationBehavior( overallocationBehavior_ ) {} - DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceMemoryOverallocationCreateInfoAMD & + operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceMemoryOverallocationCreateInfoAMD ) - offsetof( DeviceMemoryOverallocationCreateInfoAMD, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DeviceMemoryOverallocationCreateInfoAMD ) - + offsetof( DeviceMemoryOverallocationCreateInfoAMD, pNext ) ); return *this; } @@ -33338,45 +38927,46 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceMemoryOverallocationCreateInfoAMD & + operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT + DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT { overallocationBehavior = overallocationBehavior_; return *this; } - operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default; #else - bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == 
rhs.sType ) - && ( pNext == rhs.pNext ) - && ( overallocationBehavior == rhs.overallocationBehavior ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( overallocationBehavior == rhs.overallocationBehavior ); } - bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -33384,21 +38974,31 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; }; - static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == + sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DeviceQueueGlobalPriorityCreateInfoEXT { - VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DeviceQueueGlobalPriorityCreateInfoEXT( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT : globalPriority( globalPriority_ ) {} - DeviceQueueGlobalPriorityCreateInfoEXT & operator=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceQueueGlobalPriorityCreateInfoEXT & + operator=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) - offsetof( DeviceQueueGlobalPriorityCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) - + offsetof( DeviceQueueGlobalPriorityCreateInfoEXT, pNext ) ); return *this; } @@ -33407,62 +39007,64 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceQueueGlobalPriorityCreateInfoEXT & + operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT + DeviceQueueGlobalPriorityCreateInfoEXT & + setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT { globalPriority = globalPriority_; return *this; } - 
operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueGlobalPriorityCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceQueueGlobalPriorityCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueGlobalPriorityCreateInfoEXT const & ) const = default; #else - bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( globalPriority == rhs.globalPriority ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriority == rhs.globalPriority ); } - bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow; }; - static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DeviceQueueInfo2 { - VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, - uint32_t queueFamilyIndex_ = {}, + VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, uint32_t queueIndex_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , queueFamilyIndex( queueFamilyIndex_ ) @@ -33480,13 +39082,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceQueueInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -33510,49 +39112,46 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDeviceQueueInfo2 const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceQueueInfo2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueInfo2 const & ) const = default; #else - bool operator==( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ) - && ( queueIndex == rhs.queueIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueIndex == rhs.queueIndex ); } - bool operator!=( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; - uint32_t queueFamilyIndex = {}; - uint32_t queueIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueIndex = {}; }; - static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);

   struct DispatchIndirectCommand
   {
-    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {},
-                                                  uint32_t y_ = {},
-                                                  uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR
+      DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
       : x( x_ )
       , y( y_ )
       , z( z_ )
@@ -33563,9 +39162,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<DispatchIndirectCommand const*>(&rhs);
+      *this = *reinterpret_cast<DispatchIndirectCommand const *>( &rhs );
       return *this;
     }

@@ -33587,27 +39186,25 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkDispatchIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDispatchIndirectCommand*>( this );
+      return *reinterpret_cast<const VkDispatchIndirectCommand *>( this );
     }

     operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDispatchIndirectCommand*>( this );
+      return *reinterpret_cast<VkDispatchIndirectCommand *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DispatchIndirectCommand const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DispatchIndirectCommand const & ) const = default;
 #else
-    bool operator==( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( x == rhs.x )
-          && ( y == rhs.y )
-          && ( z == rhs.z );
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
     }

-    bool operator!=( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -33618,12 +39215,15 @@ namespace VULKAN_HPP_NAMESPACE
     uint32_t y = {};
     uint32_t z = {};
   };
-  static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
+  static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" 
);

   struct DisplayEventInfoEXT
   {
-    VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR
+      DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ =
+                             VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT
       : displayEvent( displayEvent_ )
     {}

@@ -33638,13 +39238,13 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<DisplayEventInfoEXT const*>(&rhs);
+      *this = *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs );
       return *this;
     }

-    DisplayEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -33656,38 +39256,37 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkDisplayEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDisplayEventInfoEXT*>( this );
+      return *reinterpret_cast<const VkDisplayEventInfoEXT *>( this );
     }

     operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDisplayEventInfoEXT*>( this );
+      return *reinterpret_cast<VkDisplayEventInfoEXT *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DisplayEventInfoEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayEventInfoEXT const & ) const = default;
 #else
-    bool operator==( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( displayEvent == rhs.displayEvent );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent );
     }

-    bool operator!=( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
-    const void* pNext = {};
+    const VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eDisplayEventInfoEXT;
+    const void *                              pNext        = {};
     VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
   };
-  static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" 
);

   struct DisplayModeParametersKHR
@@ -33703,13 +39302,14 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<DisplayModeParametersKHR const*>(&rhs);
+      *this = *reinterpret_cast<DisplayModeParametersKHR const *>( &rhs );
       return *this;
     }

-    DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
+    DisplayModeParametersKHR &
+      setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
     {
       visibleRegion = visibleRegion_;
       return *this;
@@ -33721,26 +39321,25 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkDisplayModeParametersKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDisplayModeParametersKHR*>( this );
+      return *reinterpret_cast<const VkDisplayModeParametersKHR *>( this );
     }

     operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDisplayModeParametersKHR*>( this );
+      return *reinterpret_cast<VkDisplayModeParametersKHR *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DisplayModeParametersKHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayModeParametersKHR const & ) const = default;
 #else
-    bool operator==( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( visibleRegion == rhs.visibleRegion )
-          && ( refreshRate == rhs.refreshRate );
+      return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate );
     }

-    bool operator!=( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -33748,15 +39347,17 @@ namespace VULKAN_HPP_NAMESPACE

   public:
     VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
-    uint32_t refreshRate = {};
+    uint32_t                       refreshRate   = {};
   };
-  static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
+  static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" 
); struct DisplayModeCreateInfoKHR { - VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , parameters( parameters_ ) {} @@ -33772,13 +39373,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DisplayModeCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -33790,52 +39391,53 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT + DisplayModeCreateInfoKHR & + setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT { parameters = parameters_; return *this; } - operator VkDisplayModeCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayModeCreateInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default; #else - bool operator==( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( parameters == rhs.parameters ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( parameters == rhs.parameters ); } - bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; }; - static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct DisplayModePropertiesKHR { - VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT : displayMode( displayMode_ ) , parameters( parameters_ ) {} @@ -33845,47 +39447,48 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplayModePropertiesKHR& operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDisplayModePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayModePropertiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModePropertiesKHR const & ) const = default; #else - bool operator==( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( displayMode == rhs.displayMode ) - && ( parameters == rhs.parameters ); + return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters ); } - bool operator!=( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; }; - static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);

   struct DisplayModeProperties2KHR
   {
-    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(
+      VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {} ) VULKAN_HPP_NOEXCEPT
       : displayModeProperties( displayModeProperties_ )
     {}

@@ -33900,55 +39503,60 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    DisplayModeProperties2KHR& operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<DisplayModeProperties2KHR const*>(&rhs);
+      *this = *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs );
       return *this;
     }

-    operator VkDisplayModeProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDisplayModeProperties2KHR*>( this );
+      return *reinterpret_cast<const VkDisplayModeProperties2KHR *>( this );
     }

     operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDisplayModeProperties2KHR*>( this );
+      return *reinterpret_cast<VkDisplayModeProperties2KHR *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DisplayModeProperties2KHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayModeProperties2KHR const & ) const = default;
 #else
-    bool operator==( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( displayModeProperties == rhs.displayModeProperties );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayModeProperties == rhs.displayModeProperties );
     }

-    bool operator!=( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR;
-    void* pNext = {};
+    const VULKAN_HPP_NAMESPACE::StructureType      sType                 = StructureType::eDisplayModeProperties2KHR;
+    void *                                         pNext                 = {};
     VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
   };
-  static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeProperties2KHR>::value,
+                 "struct wrapper is not a standard layout!" 
); struct DisplayNativeHdrSurfaceCapabilitiesAMD { - VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( + VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {} ) VULKAN_HPP_NOEXCEPT : localDimmingSupport( localDimmingSupport_ ) {} - DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayNativeHdrSurfaceCapabilitiesAMD & + operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) - offsetof( DisplayNativeHdrSurfaceCapabilitiesAMD, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) - + offsetof( DisplayNativeHdrSurfaceCapabilitiesAMD, pNext ) ); return *this; } @@ -33957,33 +39565,32 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplayNativeHdrSurfaceCapabilitiesAMD& operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayNativeHdrSurfaceCapabilitiesAMD & + operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default; #else - bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( localDimmingSupport == rhs.localDimmingSupport ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingSupport == rhs.localDimmingSupport ); } - bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -33991,23 +39598,26 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {}; }; - static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DisplayPlaneCapabilitiesKHR { - VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT : supportedAlpha( supportedAlpha_ ) , minSrcPosition( minSrcPosition_ ) , maxSrcPosition( maxSrcPosition_ ) @@ -34024,39 +39634,35 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplayPlaneCapabilitiesKHR& operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDisplayPlaneCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPlaneCapabilitiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default; #else - bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( supportedAlpha == rhs.supportedAlpha ) - && ( minSrcPosition == rhs.minSrcPosition ) - && ( maxSrcPosition == rhs.maxSrcPosition ) - && ( minSrcExtent == rhs.minSrcExtent ) - && ( maxSrcExtent == rhs.maxSrcExtent ) - && ( minDstPosition == rhs.minDstPosition ) - && ( maxDstPosition == rhs.maxDstPosition ) - && ( minDstExtent == rhs.minDstExtent ) - && ( maxDstExtent == rhs.maxDstExtent ); + return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) && + ( maxSrcPosition == rhs.maxSrcPosition ) && ( minSrcExtent == rhs.minSrcExtent ) && + ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) && + ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) && + ( maxDstExtent == rhs.maxDstExtent ); } - bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -34064,27 +39670,31 @@ namespace VULKAN_HPP_NAMESPACE public: 
VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {}; - VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {}; - VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {}; - VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {}; - VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {}; - VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {}; - VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {}; + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {}; }; - static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DisplayPlaneCapabilities2KHR { - VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {} ) VULKAN_HPP_NOEXCEPT : capabilities( capabilities_ ) {} DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayPlaneCapabilities2KHR ) - offsetof( DisplayPlaneCapabilities2KHR, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( DisplayPlaneCapabilities2KHR ) - offsetof( DisplayPlaneCapabilities2KHR, pNext ) ); return *this; } @@ -34093,50 +39703,50 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplayPlaneCapabilities2KHR& operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDisplayPlaneCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPlaneCapabilities2KHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default; #else - bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( capabilities == rhs.capabilities ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities ); } - bool operator!=( 
DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; + void * pNext = {}; VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {}; }; - static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DisplayPlaneInfo2KHR { VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, - uint32_t planeIndex_ = {} ) VULKAN_HPP_NOEXCEPT + uint32_t planeIndex_ = {} ) VULKAN_HPP_NOEXCEPT : mode( mode_ ) , planeIndex( planeIndex_ ) {} @@ -34152,13 +39762,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplayPlaneInfo2KHR& operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DisplayPlaneInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -34176,40 +39786,39 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDisplayPlaneInfo2KHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPlaneInfo2KHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default; #else - bool operator==( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( mode == rhs.mode ) - && ( planeIndex == rhs.planeIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && + ( planeIndex == rhs.planeIndex ); } - bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; - uint32_t planeIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; + uint32_t planeIndex = {}; }; - static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), 
"struct and wrapper have different size!" ); + static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct DisplayPlanePropertiesKHR @@ -34225,53 +39834,56 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplayPlanePropertiesKHR& operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDisplayPlanePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPlanePropertiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default; #else - bool operator==( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( currentDisplay == rhs.currentDisplay ) - && ( currentStackIndex == rhs.currentStackIndex ); + return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex ); } - bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {}; - uint32_t currentStackIndex = {}; + VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {}; + uint32_t currentStackIndex = {}; }; - static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DisplayPlaneProperties2KHR { - VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {} ) VULKAN_HPP_NOEXCEPT : displayPlaneProperties( displayPlaneProperties_ ) {} DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayPlaneProperties2KHR ) - offsetof( DisplayPlaneProperties2KHR, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( DisplayPlaneProperties2KHR ) - offsetof( DisplayPlaneProperties2KHR, pNext ) ); return *this; } @@ -34280,49 +39892,51 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplayPlaneProperties2KHR& operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDisplayPlaneProperties2KHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPlaneProperties2KHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default; #else - bool operator==( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayPlaneProperties == rhs.displayPlaneProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( displayPlaneProperties == rhs.displayPlaneProperties ); } - bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; + void * pNext = {}; VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {}; }; - static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DisplayPowerInfoEXT { - VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = + VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT : powerState( powerState_ ) {} @@ -34337,13 +39951,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DisplayPowerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -34355,44 +39969,43 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDisplayPowerInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPowerInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPowerInfoEXT const & ) const = default; #else - bool operator==( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( powerState == rhs.powerState ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState ); } - bool operator!=( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff; }; - static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct DisplayPresentInfoKHR { - VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, - VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {} ) VULKAN_HPP_NOEXCEPT : srcRect( srcRect_ ) , dstRect( dstRect_ ) @@ -34410,13 +40023,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DisplayPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -34440,53 +40053,52 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDisplayPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPresentInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPresentInfoKHR const & ) const = default; #else - bool operator==( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcRect == rhs.srcRect ) - && ( dstRect == rhs.dstRect ) - && ( persistent == rhs.persistent ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) && + ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent ); } - bool operator!=( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; - VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; - VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; + VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; }; - static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct DisplayPropertiesKHR { - VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, - const char* displayName_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, + const char * displayName_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT : display( display_ ) , displayName( displayName_ ) , physicalDimensions( physicalDimensions_ ) @@ -34501,57 +40113,56 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplayPropertiesKHR& operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDisplayPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPropertiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPropertiesKHR const & ) const = default; #else - bool operator==( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( display == rhs.display ) - && ( displayName == rhs.displayName ) - && ( physicalDimensions == rhs.physicalDimensions ) - && ( physicalResolution == rhs.physicalResolution ) - && ( supportedTransforms == rhs.supportedTransforms ) - && ( planeReorderPossible == rhs.planeReorderPossible ) - && ( persistentContent == rhs.persistentContent ); + return ( display == rhs.display ) && ( displayName == rhs.displayName ) && + ( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) && + ( supportedTransforms == rhs.supportedTransforms ) && + ( planeReorderPossible == rhs.planeReorderPossible ) && ( persistentContent == rhs.persistentContent ); } - bool operator!=( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::DisplayKHR display = {}; - const char* displayName = {}; - VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {}; - VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; - VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {}; - VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {}; + VULKAN_HPP_NAMESPACE::DisplayKHR display = {}; + const char * displayName = {}; + VULKAN_HPP_NAMESPACE::Extent2D 
physicalDimensions = {}; + VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {}; }; - static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct DisplayProperties2KHR { - VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {} ) VULKAN_HPP_NOEXCEPT : displayProperties( displayProperties_ ) {} @@ -34566,56 +40177,58 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplayProperties2KHR& operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDisplayProperties2KHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayProperties2KHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayProperties2KHR const & ) const = default; #else - bool operator==( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayProperties == rhs.displayProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties ); } - bool operator!=( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; + void * pNext = {}; VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; }; - static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct DisplaySurfaceCreateInfoKHR { - VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, - uint32_t planeIndex_ = {}, - uint32_t planeStackIndex_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - float globalAlpha_ = {}, - VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, - VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + uint32_t planeIndex_ = {}, + uint32_t planeStackIndex_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + float globalAlpha_ = {}, + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , displayMode( displayMode_ ) , planeIndex( planeIndex_ ) @@ -34628,7 +40241,8 @@ namespace VULKAN_HPP_NAMESPACE DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DisplaySurfaceCreateInfoKHR ) - offsetof( DisplaySurfaceCreateInfoKHR, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( DisplaySurfaceCreateInfoKHR ) - offsetof( DisplaySurfaceCreateInfoKHR, pNext ) ); return *this; } @@ -34637,25 +40251,27 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DisplaySurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + DisplaySurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT + DisplaySurfaceCreateInfoKHR & + setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT { displayMode = displayMode_; return *this; @@ -34673,7 +40289,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + DisplaySurfaceCreateInfoKHR & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; @@ -34685,72 +40302,72 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT + DisplaySurfaceCreateInfoKHR & + setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) 
VULKAN_HPP_NOEXCEPT { alphaMode = alphaMode_; return *this; } - DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + DisplaySurfaceCreateInfoKHR & + setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } - operator VkDisplaySurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplaySurfaceCreateInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default; #else - bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( displayMode == rhs.displayMode ) - && ( planeIndex == rhs.planeIndex ) - && ( planeStackIndex == rhs.planeStackIndex ) - && ( transform == rhs.transform ) - && ( globalAlpha == rhs.globalAlpha ) - && ( alphaMode == rhs.alphaMode ) - && ( imageExtent == rhs.imageExtent ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( displayMode == rhs.displayMode ) && ( planeIndex == rhs.planeIndex ) && + ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) && + ( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent ); } - bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; - uint32_t planeIndex = {}; - uint32_t planeStackIndex = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - float globalAlpha = {}; - VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + uint32_t planeIndex = {}; + uint32_t planeStackIndex = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + float globalAlpha = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; }; - static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct DrawIndexedIndirectCommand { - VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {}, + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {}, uint32_t instanceCount_ = {}, - uint32_t firstIndex_ = {}, - int32_t vertexOffset_ = {}, + uint32_t firstIndex_ = {}, + int32_t vertexOffset_ = {}, uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT : indexCount( indexCount_ ) , instanceCount( instanceCount_ ) @@ -34764,9 +40381,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -34800,49 +40417,49 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDrawIndexedIndirectCommand const&() const VULKAN_HPP_NOEXCEPT + operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DrawIndexedIndirectCommand const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndexedIndirectCommand const & ) const = default; #else - bool operator==( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( indexCount == rhs.indexCount ) - && ( instanceCount == rhs.instanceCount ) - && ( firstIndex == rhs.firstIndex ) - && ( vertexOffset == rhs.vertexOffset ) - && ( firstInstance == rhs.firstInstance ); + return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) && + ( firstIndex == rhs.firstIndex ) && ( vertexOffset == rhs.vertexOffset ) && + ( firstInstance == rhs.firstInstance ); } - bool operator!=( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t indexCount = {}; + uint32_t indexCount = {}; uint32_t instanceCount = {}; - uint32_t firstIndex = {}; - int32_t vertexOffset = {}; + uint32_t firstIndex = {}; + int32_t vertexOffset = {}; uint32_t firstInstance = {}; }; - static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DrawIndirectCommand { - VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {}, + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {}, uint32_t instanceCount_ = {}, - uint32_t firstVertex_ = {}, + uint32_t firstVertex_ = {}, uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT : vertexCount( vertexCount_ ) , instanceCount( instanceCount_ ) @@ -34855,9 +40472,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -34885,40 +40502,39 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDrawIndirectCommand const&() const VULKAN_HPP_NOEXCEPT + operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DrawIndirectCommand const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndirectCommand const & ) const = default; #else - bool operator==( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( vertexCount == rhs.vertexCount ) - && ( instanceCount == rhs.instanceCount ) - && ( firstVertex == rhs.firstVertex ) - && ( firstInstance == rhs.firstInstance ); + return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) && + ( firstVertex == rhs.firstVertex ) && ( firstInstance == rhs.firstInstance ); } - bool operator!=( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t vertexCount = {}; + uint32_t vertexCount = {}; uint32_t instanceCount = {}; - uint32_t firstVertex = {}; + uint32_t firstVertex = {}; uint32_t firstInstance = {}; }; - static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" ); + static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct DrawMeshTasksIndirectCommandNV @@ -34934,9 +40550,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DrawMeshTasksIndirectCommandNV& operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -34952,26 +40568,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkDrawMeshTasksIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT + operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DrawMeshTasksIndirectCommandNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default; #else - bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( taskCount == rhs.taskCount ) - && ( firstTask == rhs.firstTask ); + return ( taskCount == rhs.taskCount ) && ( firstTask == rhs.firstTask ); } - bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -34981,14 +40596,17 @@ namespace VULKAN_HPP_NAMESPACE uint32_t taskCount = {}; uint32_t firstTask = {}; }; - static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DrmFormatModifierPropertiesEXT { - VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, - uint32_t drmFormatModifierPlaneCount_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( + uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT : drmFormatModifier( drmFormatModifier_ ) , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ ) @@ -34999,57 +40617,62 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DrmFormatModifierPropertiesEXT& operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DrmFormatModifierPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default; #else - bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( drmFormatModifier == rhs.drmFormatModifier ) - && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) - && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); + return ( drmFormatModifier == rhs.drmFormatModifier ) && + ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && + ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); } - bool operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint64_t drmFormatModifier = {}; - uint32_t drmFormatModifierPlaneCount = {}; + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {}; }; - static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DrmFormatModifierPropertiesListEXT { - VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {}, - VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( + uint32_t drmFormatModifierCount_ = {}, + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT : drmFormatModifierCount( drmFormatModifierCount_ ) , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) {} DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DrmFormatModifierPropertiesListEXT ) - offsetof( DrmFormatModifierPropertiesListEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( DrmFormatModifierPropertiesListEXT ) - offsetof( DrmFormatModifierPropertiesListEXT, pNext ) ); return *this; } @@ -35058,47 +40681,49 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - DrmFormatModifierPropertiesListEXT& operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DrmFormatModifierPropertiesListEXT & + operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDrmFormatModifierPropertiesListEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DrmFormatModifierPropertiesListEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default; #else - bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) - && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); } - bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; - void* pNext = {}; - uint32_t drmFormatModifierCount = {}; - VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; + void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {}; }; - static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct EventCreateInfo { @@ -35117,13 +40742,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - EventCreateInfo& operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - EventCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -35135,27 +40760,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkEventCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( EventCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( EventCreateInfo const & ) const = default; #else - bool operator==( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); } - bool operator!=( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -35163,15 +40786,16 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {}; }; static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ExportFenceCreateInfo { - VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT : handleTypes( handleTypes_ ) {} @@ -35186,64 +40810,64 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ExportFenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + ExportFenceCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } - operator VkExportFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportFenceCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportFenceCreateInfo const & ) const = default; #else - bool operator==( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); } - bool operator!=( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {}; }; - static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #ifdef VK_USE_PLATFORM_WIN32_KHR struct ExportFenceWin32HandleInfoKHR { - VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {}, - DWORD dwAccess_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT : pAttributes( pAttributes_ ) , dwAccess( dwAccess_ ) , name( name_ ) @@ -35251,7 +40875,9 @@ namespace VULKAN_HPP_NAMESPACE ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ExportFenceWin32HandleInfoKHR ) - offsetof( ExportFenceWin32HandleInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ExportFenceWin32HandleInfoKHR ) - offsetof( ExportFenceWin32HandleInfoKHR, pNext ) ); return *this; } @@ -35260,19 +40886,19 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ExportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT + ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return *this; @@ -35290,48 +40916,48 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkExportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportFenceWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ) - && ( name == rhs.name ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && + ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); } - bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; - const void* pNext = {}; - const SECURITY_ATTRIBUTES* pAttributes = {}; - DWORD dwAccess = {}; - LPCWSTR name = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; + const void * pNext = {}; 
+ const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; }; - static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct ExportMemoryAllocateInfo { - VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT : handleTypes( handleTypes_ ) {} @@ -35346,67 +40972,69 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ExportMemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + ExportMemoryAllocateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } - operator VkExportMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportMemoryAllocateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryAllocateInfo const & ) const = default; #else - bool operator==( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); } - bool operator!=( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; }; - static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" 
); + static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct ExportMemoryAllocateInfoNV { - VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT : handleTypes( handleTypes_ ) {} ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ExportMemoryAllocateInfoNV ) - offsetof( ExportMemoryAllocateInfoNV, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( ExportMemoryAllocateInfoNV ) - offsetof( ExportMemoryAllocateInfoNV, pNext ) ); return *this; } @@ -35415,64 +41043,65 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ExportMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + ExportMemoryAllocateInfoNV & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } - operator VkExportMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportMemoryAllocateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default; #else - bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); } - bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; }; - static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_WIN32_KHR struct ExportMemoryWin32HandleInfoKHR { - VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {}, - DWORD dwAccess_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT : pAttributes( pAttributes_ ) , dwAccess( dwAccess_ ) , name( name_ ) @@ -35480,7 +41109,9 @@ namespace VULKAN_HPP_NAMESPACE ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ExportMemoryWin32HandleInfoKHR ) - offsetof( ExportMemoryWin32HandleInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ExportMemoryWin32HandleInfoKHR ) - offsetof( ExportMemoryWin32HandleInfoKHR, pNext ) ); return *this; } @@ -35489,19 +41120,19 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ExportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT + ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return *this; @@ -35519,57 +41150,58 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkExportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportMemoryWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ) - && ( name == rhs.name ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && + ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); } - bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } 
-#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; - const void* pNext = {}; - const SECURITY_ATTRIBUTES* pAttributes = {}; - DWORD dwAccess = {}; - LPCWSTR name = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; }; - static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR struct ExportMemoryWin32HandleInfoNV { - VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = {}, - DWORD dwAccess_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {} ) VULKAN_HPP_NOEXCEPT : pAttributes( pAttributes_ ) , dwAccess( dwAccess_ ) {} ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ExportMemoryWin32HandleInfoNV ) - offsetof( ExportMemoryWin32HandleInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ExportMemoryWin32HandleInfoNV ) - offsetof( ExportMemoryWin32HandleInfoNV, pNext ) ); return *this; } @@ -35578,19 +41210,19 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ExportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT + ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return *this; @@ -35602,46 +41234,47 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkExportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportMemoryWin32HandleInfoNV const& ) const = default; -#else - bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default; +# else + bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( 
sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && + ( dwAccess == rhs.dwAccess ); } - bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; - const void* pNext = {}; - const SECURITY_ATTRIBUTES* pAttributes = {}; - DWORD dwAccess = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; }; - static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct ExportSemaphoreCreateInfo { - VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT : handleTypes( handleTypes_ ) {} @@ -35656,64 +41289,65 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ExportSemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + ExportSemaphoreCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } - operator VkExportSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportSemaphoreCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default; #else - bool operator==( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); + return ( 
sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); } - bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {}; }; - static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_WIN32_KHR struct ExportSemaphoreWin32HandleInfoKHR { - VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {}, - DWORD dwAccess_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT : pAttributes( pAttributes_ ) , dwAccess( dwAccess_ ) , name( name_ ) @@ -35721,7 +41355,9 @@ namespace VULKAN_HPP_NAMESPACE ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ExportSemaphoreWin32HandleInfoKHR ) - offsetof( ExportSemaphoreWin32HandleInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ExportSemaphoreWin32HandleInfoKHR ) - offsetof( ExportSemaphoreWin32HandleInfoKHR, pNext ) ); return *this; } @@ -35730,19 +41366,19 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ExportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT + ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return *this; @@ -35760,49 +41396,49 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkExportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( 
ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ) - && ( name == rhs.name ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && + ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); } - bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; - const void* pNext = {}; - const SECURITY_ATTRIBUTES* pAttributes = {}; - DWORD dwAccess = {}; - LPCWSTR name = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; }; - static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct ExtensionProperties { - VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array const& extensionName_ = {}, - uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + ExtensionProperties( std::array const & extensionName_ = {}, + uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT : extensionName( extensionName_ ) , specVersion( specVersion_ ) {} @@ -35812,32 +41448,31 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExtensionProperties& operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkExtensionProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExtensionProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExtensionProperties const & ) const = default; #else - bool operator==( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( extensionName == rhs.extensionName ) - && ( specVersion == rhs.specVersion ); + return ( extensionName == rhs.extensionName ) && ( specVersion == rhs.specVersion ); } - bool operator!=( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -35845,16 +41480,18 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::ArrayWrapper1D extensionName = {}; - uint32_t specVersion = {}; + uint32_t specVersion = {}; }; - static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" ); + static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ExternalMemoryProperties { - VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT : externalMemoryFeatures( externalMemoryFeatures_ ) , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) , compatibleHandleTypes( compatibleHandleTypes_ ) @@ -35865,49 +41502,51 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExternalMemoryProperties& operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkExternalMemoryProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalMemoryProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryProperties const & ) const = default; #else - bool operator==( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); + return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ); } - bool operator!=( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; }; - static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" ); + static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ExternalBufferProperties { - VULKAN_HPP_CONSTEXPR ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExternalBufferProperties( + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT : externalMemoryProperties( externalMemoryProperties_ ) {} @@ -35922,51 +41561,52 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExternalBufferProperties& operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkExternalBufferProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalBufferProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalBufferProperties const & ) const = default; #else - bool operator==( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( externalMemoryProperties == rhs.externalMemoryProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( externalMemoryProperties == rhs.externalMemoryProperties ); } - bool operator!=( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties; + void * pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; }; - static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" ); + static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ExternalFenceProperties { - VULKAN_HPP_CONSTEXPR ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExternalFenceProperties( + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {} ) VULKAN_HPP_NOEXCEPT : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) , compatibleHandleTypes( compatibleHandleTypes_ ) , externalFenceFeatures( externalFenceFeatures_ ) @@ -35983,48 +41623,48 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExternalFenceProperties& operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkExternalFenceProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalFenceProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalFenceProperties const & ) const = default; #else - bool operator==( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) - && ( externalFenceFeatures == rhs.externalFenceFeatures ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && + ( externalFenceFeatures == rhs.externalFenceFeatures ); } - bool operator!=( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties; + void * pNext = {}; VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {}; }; - static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" 
); + static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_ANDROID_KHR @@ -36045,13 +41685,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ExternalFormatANDROID & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -36063,50 +41703,52 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkExternalFormatANDROID const&() const VULKAN_HPP_NOEXCEPT + operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalFormatANDROID const& ) const = default; -#else - bool operator==( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalFormatANDROID const & ) const = default; +# else + bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( externalFormat == rhs.externalFormat ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat ); } - bool operator!=( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID; - void* pNext = {}; - uint64_t externalFormat = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID; + void * pNext = {}; + uint64_t externalFormat = {}; }; - static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" ); + static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ struct ExternalImageFormatProperties { - VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT : externalMemoryProperties( externalMemoryProperties_ ) {} ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalImageFormatProperties ) - offsetof( ExternalImageFormatProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ExternalImageFormatProperties ) - offsetof( ExternalImageFormatProperties, pNext ) ); return *this; } @@ -36115,53 +41757,55 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExternalImageFormatProperties& operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkExternalImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalImageFormatProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalImageFormatProperties const & ) const = default; #else - bool operator==( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( externalMemoryProperties == rhs.externalMemoryProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( externalMemoryProperties == rhs.externalMemoryProperties ); } - bool operator!=( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties; + void * pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; }; - static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct ImageFormatProperties { - VULKAN_HPP_CONSTEXPR ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, - uint32_t maxMipLevels_ = {}, - uint32_t maxArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, + uint32_t maxMipLevels_ = {}, + uint32_t maxArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT : maxExtent( maxExtent_ ) , maxMipLevels( maxMipLevels_ ) , maxArrayLayers( maxArrayLayers_ ) @@ -36174,56 +41818,56 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageFormatProperties& operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageFormatProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatProperties const & ) const = default; #else - bool operator==( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( maxExtent == rhs.maxExtent ) - && ( maxMipLevels == rhs.maxMipLevels ) - && ( maxArrayLayers == rhs.maxArrayLayers ) - && ( sampleCounts == rhs.sampleCounts ) - && ( maxResourceSize == rhs.maxResourceSize ); + return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) && + ( maxArrayLayers == rhs.maxArrayLayers ) && ( sampleCounts == rhs.sampleCounts ) && + ( maxResourceSize == rhs.maxResourceSize ); } - bool operator!=( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {}; - uint32_t maxMipLevels = {}; - uint32_t maxArrayLayers = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {}; + VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {}; + uint32_t maxMipLevels = {}; + uint32_t maxArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {}; }; - static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" ); + static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ExternalImageFormatPropertiesNV { - VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT : imageFormatProperties( imageFormatProperties_ ) , externalMemoryFeatures( externalMemoryFeatures_ ) , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) @@ -36235,57 +41879,62 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExternalImageFormatPropertiesNV& operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkExternalImageFormatPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalImageFormatPropertiesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default; #else - bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( imageFormatProperties == rhs.imageFormatProperties ) - && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); + return ( imageFormatProperties == rhs.imageFormatProperties ) && + ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ); } - bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV 
compatibleHandleTypes = {}; }; - static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ExternalMemoryBufferCreateInfo { - VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT : handleTypes( handleTypes_ ) {} ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalMemoryBufferCreateInfo ) - offsetof( ExternalMemoryBufferCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ExternalMemoryBufferCreateInfo ) - offsetof( ExternalMemoryBufferCreateInfo, pNext ) ); return *this; } @@ -36294,67 +41943,71 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ExternalMemoryBufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + ExternalMemoryBufferCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } - operator VkExternalMemoryBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalMemoryBufferCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default; #else - bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); } - bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; - const void* 
pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; }; - static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ExternalMemoryImageCreateInfo { - VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT : handleTypes( handleTypes_ ) {} ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalMemoryImageCreateInfo ) - offsetof( ExternalMemoryImageCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ExternalMemoryImageCreateInfo ) - offsetof( ExternalMemoryImageCreateInfo, pNext ) ); return *this; } @@ -36363,67 +42016,71 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ExternalMemoryImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + ExternalMemoryImageCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } - operator VkExternalMemoryImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalMemoryImageCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default; #else - bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); } - bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; }; - static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ExternalMemoryImageCreateInfoNV { - VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT : handleTypes( handleTypes_ ) {} ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalMemoryImageCreateInfoNV ) - offsetof( ExternalMemoryImageCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ExternalMemoryImageCreateInfoNV ) - offsetof( ExternalMemoryImageCreateInfoNV, pNext ) ); return *this; } @@ -36432,63 +42089,65 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ExternalMemoryImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + ExternalMemoryImageCreateInfoNV & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } - operator VkExternalMemoryImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalMemoryImageCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default; #else - bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == 
rhs.handleTypes ); } - bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; }; - static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ExternalSemaphoreProperties { - VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {} ) VULKAN_HPP_NOEXCEPT : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) , compatibleHandleTypes( compatibleHandleTypes_ ) , externalSemaphoreFeatures( externalSemaphoreFeatures_ ) @@ -36496,7 +42155,8 @@ namespace VULKAN_HPP_NAMESPACE ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalSemaphoreProperties ) - offsetof( ExternalSemaphoreProperties, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( ExternalSemaphoreProperties ) - offsetof( ExternalSemaphoreProperties, pNext ) ); return *this; } @@ -36505,49 +42165,50 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ExternalSemaphoreProperties& operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkExternalSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalSemaphoreProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalSemaphoreProperties const & ) const = default; #else - bool operator==( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { 
- return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) - && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && + ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); } - bool operator!=( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; + void * pNext = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; }; - static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct FenceCreateInfo { @@ -36566,13 +42227,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - FenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -36584,27 +42245,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FenceCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceCreateInfo const & ) const = default; #else - bool operator==( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); } - bool operator!=( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -36612,16 +42271,18 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {}; }; static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
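A minimal sketch of how the vk::FenceCreateInfo wrapper reformatted above is typically used, assuming a valid vk::Device named device:

  vk::FenceCreateInfo fenceCreateInfo( vk::FenceCreateFlagBits::eSignaled );  // start in the signaled state
  vk::Fence drawFence = device.createFence( fenceCreateInfo );
  // ... wait on and reset the fence as usual ...
  device.destroyFence( drawFence );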
); struct FenceGetFdInfoKHR { - VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT : fence( fence_ ) , handleType( handleType_ ) {} @@ -36637,13 +42298,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - FenceGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -36655,60 +42316,64 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + FenceGetFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - operator VkFenceGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FenceGetFdInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetFdInfoKHR const & ) const = default; #else - bool operator==( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && + ( handleType == rhs.handleType ); } - bool operator!=( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; }; - static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
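vk::FenceGetFdInfoKHR pairs with vk::Device::getFenceFdKHR; a hedged sketch, assuming VK_KHR_external_fence_fd is enabled and fence is an exportable vk::Fence:

  int fd = device.getFenceFdKHR(
    vk::FenceGetFdInfoKHR( fence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd ) );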
); #ifdef VK_USE_PLATFORM_WIN32_KHR struct FenceGetWin32HandleInfoKHR { - VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT : fence( fence_ ) , handleType( handleType_ ) {} FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( FenceGetWin32HandleInfoKHR ) - offsetof( FenceGetWin32HandleInfoKHR, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( FenceGetWin32HandleInfoKHR ) - offsetof( FenceGetWin32HandleInfoKHR, pNext ) ); return *this; } @@ -36717,13 +42382,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - FenceGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -36735,96 +42400,103 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + FenceGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - operator VkFenceGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FenceGetWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && + ( handleType == rhs.handleType ); } - bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; }; - static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct FilterCubicImageViewImageFormatPropertiesEXT { - VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( + VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {} ) VULKAN_HPP_NOEXCEPT : filterCubic( filterCubic_ ) , filterCubicMinmax( filterCubicMinmax_ ) {} - FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + FilterCubicImageViewImageFormatPropertiesEXT & + operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) - offsetof( FilterCubicImageViewImageFormatPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) - + offsetof( FilterCubicImageViewImageFormatPropertiesEXT, pNext ) ); return *this; } - FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - FilterCubicImageViewImageFormatPropertiesEXT& operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + FilterCubicImageViewImageFormatPropertiesEXT & + operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkFilterCubicImageViewImageFormatPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default; #else - bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext 
== rhs.pNext ) - && ( filterCubic == rhs.filterCubic ) - && ( filterCubicMinmax == rhs.filterCubicMinmax ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) && + ( filterCubicMinmax == rhs.filterCubicMinmax ); } - bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -36832,18 +42504,22 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {}; }; - static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == + sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct FormatProperties { - VULKAN_HPP_CONSTEXPR FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT : linearTilingFeatures( linearTilingFeatures_ ) , optimalTilingFeatures( optimalTilingFeatures_ ) , bufferFeatures( bufferFeatures_ ) @@ -36854,49 +42530,50 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - FormatProperties& operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkFormatProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FormatProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties const & ) const = default; #else - bool operator==( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( linearTilingFeatures == rhs.linearTilingFeatures ) - && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) - && ( bufferFeatures == rhs.bufferFeatures ); + return ( linearTilingFeatures == rhs.linearTilingFeatures ) && + ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == 
rhs.bufferFeatures ); } - bool operator!=( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {}; }; - static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" ); + static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct FormatProperties2 { - VULKAN_HPP_CONSTEXPR FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {} ) VULKAN_HPP_NOEXCEPT : formatProperties( formatProperties_ ) {} @@ -36911,55 +42588,55 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - FormatProperties2& operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT + operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FormatProperties2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties2 const & ) const = default; #else - bool operator==( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( formatProperties == rhs.formatProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties ); } - bool operator!=( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {}; }; - static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
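vk::FormatProperties is the result of a format query, and vk::FormatProperties2 wraps it with a pNext chain; a minimal sketch, assuming a valid vk::PhysicalDevice named physicalDevice:

  vk::FormatProperties formatProperties = physicalDevice.getFormatProperties( vk::Format::eD16Unorm );
  bool usableAsDepth = static_cast<bool>( formatProperties.optimalTilingFeatures &
                                          vk::FormatFeatureFlagBits::eDepthStencilAttachment );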
); struct FramebufferAttachmentImageInfo { - VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - uint32_t width_ = {}, - uint32_t height_ = {}, - uint32_t layerCount_ = {}, - uint32_t viewFormatCount_ = {}, - const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layerCount_ = {}, + uint32_t viewFormatCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , usage( usage_ ) , width( width_ ) @@ -36971,7 +42648,9 @@ namespace VULKAN_HPP_NAMESPACE FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( FramebufferAttachmentImageInfo ) - offsetof( FramebufferAttachmentImageInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( FramebufferAttachmentImageInfo ) - offsetof( FramebufferAttachmentImageInfo, pNext ) ); return *this; } @@ -36980,13 +42659,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - FramebufferAttachmentImageInfo& operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - FramebufferAttachmentImageInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -37028,69 +42707,69 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT + FramebufferAttachmentImageInfo & + setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT { pViewFormats = pViewFormats_; return *this; } - operator VkFramebufferAttachmentImageInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FramebufferAttachmentImageInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default; #else - bool operator==( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( usage == rhs.usage ) - && ( width == rhs.width ) - && ( height == rhs.height ) - && ( layerCount == rhs.layerCount ) - && ( viewFormatCount == rhs.viewFormatCount ) - && ( pViewFormats == rhs.pViewFormats ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && + ( width == rhs.width ) && ( height == rhs.height ) && ( layerCount == rhs.layerCount ) && + ( viewFormatCount == 
rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats ); } - bool operator!=( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - uint32_t width = {}; - uint32_t height = {}; - uint32_t layerCount = {}; - uint32_t viewFormatCount = {}; - const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layerCount = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; }; - static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct FramebufferAttachmentsCreateInfo { - VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {}, - const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( + uint32_t attachmentImageInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {} ) VULKAN_HPP_NOEXCEPT : attachmentImageInfoCount( attachmentImageInfoCount_ ) , pAttachmentImageInfos( pAttachmentImageInfos_ ) {} FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( FramebufferAttachmentsCreateInfo ) - offsetof( FramebufferAttachmentsCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( FramebufferAttachmentsCreateInfo ) - offsetof( FramebufferAttachmentsCreateInfo, pNext ) ); return *this; } @@ -37099,52 +42778,53 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - FramebufferAttachmentsCreateInfo& operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - FramebufferAttachmentsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT + FramebufferAttachmentsCreateInfo & + setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT { attachmentImageInfoCount = attachmentImageInfoCount_; return *this; } - FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const 
VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT + FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT { pAttachmentImageInfos = pAttachmentImageInfos_; return *this; } - operator VkFramebufferAttachmentsCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FramebufferAttachmentsCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default; #else - bool operator==( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) - && ( pAttachmentImageInfos == rhs.pAttachmentImageInfos ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) && + ( pAttachmentImageInfos == rhs.pAttachmentImageInfos ); } - bool operator!=( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -37152,21 +42832,23 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo; - const void* pNext = {}; - uint32_t attachmentImageInfoCount = {}; - const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos = {}; + const void * pNext = {}; + uint32_t attachmentImageInfoCount = {}; + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {}; }; - static_assert( sizeof( FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
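These two structures describe the attachments of an imageless framebuffer; a minimal sketch of filling them in, where colorFormat is an assumed swapchain format and extent an assumed vk::Extent2D:

  vk::FramebufferAttachmentImageInfo attachmentImageInfo( vk::ImageCreateFlags(),
                                                          vk::ImageUsageFlagBits::eColorAttachment,
                                                          extent.width,
                                                          extent.height,
                                                          1,              // layerCount
                                                          1,              // viewFormatCount
                                                          &colorFormat );
  vk::FramebufferAttachmentsCreateInfo attachmentsCreateInfo( 1, &attachmentImageInfo );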
); struct FramebufferCreateInfo { - VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {}, - uint32_t width_ = {}, - uint32_t height_ = {}, + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, uint32_t layers_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , renderPass( renderPass_ ) @@ -37188,13 +42870,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - FramebufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -37218,7 +42900,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT + FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; @@ -37242,67 +42924,69 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkFramebufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FramebufferCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferCreateInfo const & ) const = default; #else - bool operator==( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( renderPass == rhs.renderPass ) - && ( attachmentCount == rhs.attachmentCount ) - && ( pAttachments == rhs.pAttachments ) - && ( width == rhs.width ) - && ( height == rhs.height ) - && ( layers == rhs.layers ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( renderPass == rhs.renderPass ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) && + ( layers == rhs.layers ); } - bool operator!=( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - uint32_t attachmentCount = {}; - const 
VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {}; - uint32_t width = {}; - uint32_t height = {}; - uint32_t layers = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layers = {}; }; - static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct FramebufferMixedSamplesCombinationNV { - VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {} ) VULKAN_HPP_NOEXCEPT : coverageReductionMode( coverageReductionMode_ ) , rasterizationSamples( rasterizationSamples_ ) , depthStencilSamples( depthStencilSamples_ ) , colorSamples( colorSamples_ ) {} - FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + FramebufferMixedSamplesCombinationNV & + operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( FramebufferMixedSamplesCombinationNV ) - offsetof( FramebufferMixedSamplesCombinationNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( FramebufferMixedSamplesCombinationNV ) - + offsetof( FramebufferMixedSamplesCombinationNV, pNext ) ); return *this; } @@ -37311,55 +42995,57 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - FramebufferMixedSamplesCombinationNV& operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + FramebufferMixedSamplesCombinationNV & + operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkFramebufferMixedSamplesCombinationNV const&() const VULKAN_HPP_NOEXCEPT + operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FramebufferMixedSamplesCombinationNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + 
auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default; #else - bool operator==( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( coverageReductionMode == rhs.coverageReductionMode ) - && ( rasterizationSamples == rhs.rasterizationSamples ) - && ( depthStencilSamples == rhs.depthStencilSamples ) - && ( colorSamples == rhs.colorSamples ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( coverageReductionMode == rhs.coverageReductionMode ) && + ( rasterizationSamples == rhs.rasterizationSamples ) && + ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples ); } - bool operator!=( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {}; }; - static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
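The vk::FramebufferCreateInfo wrapper shown above keeps its positional constructor; a minimal creation sketch, assuming a valid vk::Device device, a vk::RenderPass renderPass, existing color/depth vk::ImageViews, and a vk::Extent2D extent:

  vk::ImageView attachments[] = { colorImageView, depthImageView };
  vk::FramebufferCreateInfo framebufferCreateInfo( vk::FramebufferCreateFlags(),
                                                   renderPass,
                                                   2,
                                                   attachments,
                                                   extent.width,
                                                   extent.height,
                                                   1 );  // layers
  vk::Framebuffer framebuffer = device.createFramebuffer( framebufferCreateInfo );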
); struct IndirectCommandsStreamNV { - VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT : buffer( buffer_ ) , offset( offset_ ) @@ -37370,9 +43056,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - IndirectCommandsStreamNV& operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -37388,53 +43074,54 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkIndirectCommandsStreamNV const&() const VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IndirectCommandsStreamNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsStreamNV const & ) const = default; #else - bool operator==( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( buffer == rhs.buffer ) - && ( offset == rhs.offset ); + return ( buffer == rhs.buffer ) && ( offset == rhs.offset ); } - bool operator!=( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; }; - static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" ); + static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct GeneratedCommandsInfoNV { - VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, - uint32_t streamCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ = {}, - uint32_t sequencesCount_ = {}, - VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, - VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, - VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t streamCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {}, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT : pipelineBindPoint( pipelineBindPoint_ ) , pipeline( pipeline_ ) , indirectCommandsLayout( indirectCommandsLayout_ ) @@ -37461,19 +43148,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - GeneratedCommandsInfoNV& operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - GeneratedCommandsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; @@ -37485,7 +43173,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & setIndirectCommandsLayout( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT { indirectCommandsLayout = indirectCommandsLayout_; return *this; @@ -37497,7 +43186,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & + setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT { pStreams = pStreams_; return *this; @@ 
-37515,7 +43205,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & + setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT { preprocessOffset = preprocessOffset_; return *this; @@ -37527,136 +43218,145 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & + setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT { sequencesCountBuffer = sequencesCountBuffer_; return *this; } - GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & + setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT { sequencesCountOffset = sequencesCountOffset_; return *this; } - GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & + setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT { sequencesIndexBuffer = sequencesIndexBuffer_; return *this; } - GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & + setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT { sequencesIndexOffset = sequencesIndexOffset_; return *this; } - operator VkGeneratedCommandsInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( GeneratedCommandsInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsInfoNV const & ) const = default; #else - bool operator==( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( pipeline == rhs.pipeline ) - && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) - && ( streamCount == rhs.streamCount ) - && ( pStreams == rhs.pStreams ) - && ( sequencesCount == rhs.sequencesCount ) - && ( preprocessBuffer == rhs.preprocessBuffer ) - && ( preprocessOffset == rhs.preprocessOffset ) - && ( preprocessSize == rhs.preprocessSize ) - && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) - && ( sequencesCountOffset == rhs.sequencesCountOffset ) - && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) - && ( sequencesIndexOffset == rhs.sequencesIndexOffset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && + ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) && + ( sequencesCount == rhs.sequencesCount ) && ( preprocessBuffer == 
rhs.preprocessBuffer ) && + ( preprocessOffset == rhs.preprocessOffset ) && ( preprocessSize == rhs.preprocessSize ) && + ( sequencesCountBuffer == rhs.sequencesCountBuffer ) && + ( sequencesCountOffset == rhs.sequencesCountOffset ) && + ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) && + ( sequencesIndexOffset == rhs.sequencesIndexOffset ); } - bool operator!=( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; - uint32_t streamCount = {}; - const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams = {}; - uint32_t sequencesCount = {}; - VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; - VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; - VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t streamCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams = {}; + uint32_t sequencesCount = {}; + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; }; - static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" ); + static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
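vk::GeneratedCommandsInfoNV (VK_NV_device_generated_commands) reads most naturally through the chained setters reformatted above; a hedged sketch in which pipeline, indirectCommandsLayout, inputBuffer, preprocessBuffer, preprocessSize, and sequenceCount are all assumed to exist:

  vk::IndirectCommandsStreamNV stream( inputBuffer, 0 );  // buffer holding the command tokens
  vk::GeneratedCommandsInfoNV generatedCommandsInfo;
  generatedCommandsInfo.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
    .setPipeline( pipeline )
    .setIndirectCommandsLayout( indirectCommandsLayout )
    .setStreamCount( 1 )
    .setPStreams( &stream )
    .setSequencesCount( sequenceCount )
    .setPreprocessBuffer( preprocessBuffer )
    .setPreprocessSize( preprocessSize );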
); struct GeneratedCommandsMemoryRequirementsInfoNV { - VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, - uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT : pipelineBindPoint( pipelineBindPoint_ ) , pipeline( pipeline_ ) , indirectCommandsLayout( indirectCommandsLayout_ ) , maxSequencesCount( maxSequencesCount_ ) {} - GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsMemoryRequirementsInfoNV & + operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) - offsetof( GeneratedCommandsMemoryRequirementsInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) - + offsetof( GeneratedCommandsMemoryRequirementsInfoNV, pNext ) ); return *this; } - GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - GeneratedCommandsMemoryRequirementsInfoNV& operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsMemoryRequirementsInfoNV & + operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - GeneratedCommandsMemoryRequirementsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsMemoryRequirementsInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } - GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsMemoryRequirementsInfoNV & + setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT { pipeline = pipeline_; return *this; } - GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT { indirectCommandsLayout = indirectCommandsLayout_; return *this; @@ -37668,30 +43368,27 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator 
VkGeneratedCommandsMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default; #else - bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( pipeline == rhs.pipeline ) - && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) - && ( maxSequencesCount == rhs.maxSequencesCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && + ( maxSequencesCount == rhs.maxSequencesCount ); } - bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -37699,20 +43396,25 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; - uint32_t maxSequencesCount = {}; + uint32_t maxSequencesCount = {}; }; - static_assert( sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) == + sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
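Its companion query structure feeds the memory-requirements call; a hedged sketch, assuming the same pipeline and indirectCommandsLayout, an assumed maxSequencesCount, and a device with VK_NV_device_generated_commands enabled:

  vk::MemoryRequirements2 memoryRequirements = device.getGeneratedCommandsMemoryRequirementsNV(
    vk::GeneratedCommandsMemoryRequirementsInfoNV(
      vk::PipelineBindPoint::eGraphics, pipeline, indirectCommandsLayout, maxSequencesCount ) );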
); struct VertexInputBindingDescription { - VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( uint32_t binding_ = {}, - uint32_t stride_ = {}, - VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription( uint32_t binding_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = + VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT : binding( binding_ ) , stride( stride_ ) , inputRate( inputRate_ ) @@ -37723,9 +43425,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -37747,46 +43449,47 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkVertexInputBindingDescription const&() const VULKAN_HPP_NOEXCEPT + operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( VertexInputBindingDescription const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDescription const & ) const = default; #else - bool operator==( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( binding == rhs.binding ) - && ( stride == rhs.stride ) - && ( inputRate == rhs.inputRate ); + return ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ); } - bool operator!=( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t binding = {}; - uint32_t stride = {}; + uint32_t binding = {}; + uint32_t stride = {}; VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; }; - static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct VertexInputAttributeDescription { - VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = {}, - uint32_t binding_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + VertexInputAttributeDescription( uint32_t location_ = {}, + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT : location( location_ ) , binding( binding_ ) , format( format_ ) @@ -37798,9 +43501,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -37828,49 +43531,51 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkVertexInputAttributeDescription const&() const VULKAN_HPP_NOEXCEPT + operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( VertexInputAttributeDescription const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputAttributeDescription const & ) const = default; #else - bool operator==( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( location == rhs.location ) - && ( binding == rhs.binding ) - && ( format == rhs.format ) - && ( offset == rhs.offset ); + return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && + ( offset == rhs.offset ); } - bool operator!=( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t location = {}; - uint32_t binding = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - uint32_t offset = {}; + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; }; - static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineVertexInputStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, - uint32_t vertexBindingDescriptionCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ = {}, - uint32_t vertexAttributeDescriptionCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, + uint32_t vertexBindingDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {}, + uint32_t vertexAttributeDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {} ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ) , pVertexBindingDescriptions( pVertexBindingDescriptions_ ) @@ -37880,7 +43585,9 @@ namespace VULKAN_HPP_NAMESPACE PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineVertexInputStateCreateInfo ) - offsetof( PipelineVertexInputStateCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineVertexInputStateCreateInfo ) - offsetof( PipelineVertexInputStateCreateInfo, pNext ) ); return *this; } @@ -37889,73 +43596,77 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & + operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineVertexInputStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & + setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT { vertexBindingDescriptionCount = vertexBindingDescriptionCount_; return *this; } - PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT { pVertexBindingDescriptions = pVertexBindingDescriptions_; return *this; } - PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & + setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT { 
vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; return *this; } - PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT { pVertexAttributeDescriptions = pVertexAttributeDescriptions_; return *this; } - operator VkPipelineVertexInputStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineVertexInputStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default; #else - bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) - && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) - && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) - && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) && + ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) && + ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) && + ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); } - bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -37963,29 +43674,36 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; - uint32_t vertexBindingDescriptionCount = {}; - const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions = {}; - uint32_t vertexAttributeDescriptionCount = {}; - const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; + uint32_t vertexBindingDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {}; + uint32_t vertexAttributeDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {}; }; - static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineInputAssemblyStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, - VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , topology( topology_ ) , primitiveRestartEnable( primitiveRestartEnable_ ) {} - PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineInputAssemblyStateCreateInfo & + operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineInputAssemblyStateCreateInfo ) - offsetof( PipelineInputAssemblyStateCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineInputAssemblyStateCreateInfo ) - + offsetof( PipelineInputAssemblyStateCreateInfo, pNext ) ); return *this; } @@ -37994,59 +43712,60 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineInputAssemblyStateCreateInfo & + operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineInputAssemblyStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineInputAssemblyStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT + PipelineInputAssemblyStateCreateInfo & + setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT { topology = topology_; return *this; } - PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineInputAssemblyStateCreateInfo & + setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT { primitiveRestartEnable = primitiveRestartEnable_; return *this; } - operator VkPipelineInputAssemblyStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator 
VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineInputAssemblyStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default; #else - bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( topology == rhs.topology ) - && ( primitiveRestartEnable == rhs.primitiveRestartEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( topology == rhs.topology ) && ( primitiveRestartEnable == rhs.primitiveRestartEnable ); } - bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -38054,25 +43773,31 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; - const void* pNext = {}; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList; - VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; }; - static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineTessellationStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, - uint32_t patchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, + uint32_t patchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , patchControlPoints( patchControlPoints_ ) {} - PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineTessellationStateCreateInfo & + operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineTessellationStateCreateInfo ) - offsetof( PipelineTessellationStateCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineTessellationStateCreateInfo ) - offsetof( PipelineTessellationStateCreateInfo, pNext ) ); return *this; } @@ -38081,19 +43806,21 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineTessellationStateCreateInfo & + operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineTessellationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineTessellationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -38105,28 +43832,26 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPipelineTessellationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineTessellationStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default; #else - bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( patchControlPoints == rhs.patchControlPoints ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( patchControlPoints == rhs.patchControlPoints ); } - bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -38134,19 +43859,21 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineTessellationStateCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; - uint32_t patchControlPoints = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; + uint32_t patchControlPoints = {}; }; - static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct Viewport { - VULKAN_HPP_CONSTEXPR Viewport( float x_ = {}, - float y_ = {}, - float width_ = {}, - float height_ = {}, + VULKAN_HPP_CONSTEXPR Viewport( float x_ = {}, + float y_ = {}, + float width_ = {}, + float height_ = {}, float minDepth_ = {}, float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT : x( x_ ) @@ -38162,9 +43889,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - Viewport& operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT + Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -38204,40 +43931,36 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkViewport const&() const VULKAN_HPP_NOEXCEPT + operator VkViewport const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkViewport &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Viewport const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Viewport const & ) const = default; #else - bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( x == rhs.x ) - && ( y == rhs.y ) - && ( width == rhs.width ) - && ( height == rhs.height ) - && ( minDepth == rhs.minDepth ) - && ( maxDepth == rhs.maxDepth ); + return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) && + ( minDepth == rhs.minDepth ) && ( maxDepth == rhs.maxDepth ); } - bool operator!=( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - float x = {}; - float y = {}; - float width = {}; - float height = {}; + float x = {}; + float y = {}; + float width = {}; + float height = {}; float minDepth = {}; float maxDepth = {}; }; @@ -38246,11 +43969,12 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineViewportStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ = {}, - uint32_t scissorCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {}, + uint32_t scissorCount_ = 
{}, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , viewportCount( viewportCount_ ) , pViewports( pViewports_ ) @@ -38260,7 +43984,9 @@ namespace VULKAN_HPP_NAMESPACE PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportStateCreateInfo ) - offsetof( PipelineViewportStateCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineViewportStateCreateInfo ) - offsetof( PipelineViewportStateCreateInfo, pNext ) ); return *this; } @@ -38269,19 +43995,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineViewportStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -38293,7 +44020,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportStateCreateInfo & + setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT { pViewports = pViewports_; return *this; @@ -38305,67 +44033,67 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportStateCreateInfo & + setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT { pScissors = pScissors_; return *this; } - operator VkPipelineViewportStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineViewportStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default; #else - bool operator==( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( viewportCount == rhs.viewportCount ) - && ( pViewports == rhs.pViewports ) - && ( scissorCount == rhs.scissorCount ) - && ( pScissors == rhs.pScissors ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( viewportCount == rhs.viewportCount ) && ( pViewports == rhs.pViewports ) && + ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors ); } - bool operator!=( 
PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {}; - uint32_t viewportCount = {}; - const VULKAN_HPP_NAMESPACE::Viewport* pViewports = {}; - uint32_t scissorCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D* pScissors = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {}; + uint32_t scissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {}; }; - static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineRasterizationStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, - VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, - VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, - VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, - VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, - float depthBiasConstantFactor_ = {}, - float depthBiasClamp_ = {}, - float depthBiasSlopeFactor_ = {}, - float lineWidth_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, + VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, + float depthBiasConstantFactor_ = {}, + float depthBiasClamp_ = {}, + float depthBiasSlopeFactor_ = {}, + float lineWidth_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , depthClampEnable( depthClampEnable_ ) , rasterizerDiscardEnable( rasterizerDiscardEnable_ ) @@ -38379,9 +44107,13 @@ namespace VULKAN_HPP_NAMESPACE , lineWidth( lineWidth_ ) {} - PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateCreateInfo & + operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationStateCreateInfo ) - offsetof( PipelineRasterizationStateCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineRasterizationStateCreateInfo ) - + offsetof( PipelineRasterizationStateCreateInfo, pNext ) 
); return *this; } @@ -38390,61 +44122,70 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateCreateInfo & + operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineRasterizationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRasterizationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateCreateInfo & + setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT { depthClampEnable = depthClampEnable_; return *this; } - PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateCreateInfo & + setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT { rasterizerDiscardEnable = rasterizerDiscardEnable_; return *this; } - PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateCreateInfo & + setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT { polygonMode = polygonMode_; return *this; } - PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateCreateInfo & + setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT { cullMode = cullMode_; return *this; } - PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateCreateInfo & + setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT { frontFace = frontFace_; return *this; } - PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateCreateInfo & + setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT { depthBiasEnable = depthBiasEnable_; return *this; } - PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateCreateInfo & + setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT { depthBiasConstantFactor = depthBiasConstantFactor_; return *this; @@ -38468,37 +44209,31 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPipelineRasterizationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return 
*reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineRasterizationStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default; #else - bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( depthClampEnable == rhs.depthClampEnable ) - && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) - && ( polygonMode == rhs.polygonMode ) - && ( cullMode == rhs.cullMode ) - && ( frontFace == rhs.frontFace ) - && ( depthBiasEnable == rhs.depthBiasEnable ) - && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) - && ( depthBiasClamp == rhs.depthBiasClamp ) - && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) - && ( lineWidth == rhs.lineWidth ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( depthClampEnable == rhs.depthClampEnable ) && + ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) && + ( cullMode == rhs.cullMode ) && ( frontFace == rhs.frontFace ) && + ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) && + ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) && + ( lineWidth == rhs.lineWidth ); } - bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -38506,31 +44241,34 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {}; - VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {}; - VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill; - VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {}; - VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise; - VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {}; - float depthBiasConstantFactor = {}; - float depthBiasClamp = {}; - float depthBiasSlopeFactor = {}; - float lineWidth = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {}; + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill; + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {}; + VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {}; + float depthBiasConstantFactor = {}; + float depthBiasClamp = {}; + float depthBiasSlopeFactor = {}; + float lineWidth = {}; }; - static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineMultisampleStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, - float minSampleShading_ = {}, - const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, + float minSampleShading_ = {}, + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , rasterizationSamples( rasterizationSamples_ ) , sampleShadingEnable( sampleShadingEnable_ ) @@ -38542,7 +44280,9 @@ namespace VULKAN_HPP_NAMESPACE PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineMultisampleStateCreateInfo ) - offsetof( PipelineMultisampleStateCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineMultisampleStateCreateInfo ) - offsetof( PipelineMultisampleStateCreateInfo, pNext ) ); return *this; } @@ -38551,31 +44291,35 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineMultisampleStateCreateInfo & + operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineMultisampleStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineMultisampleStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineMultisampleStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineMultisampleStateCreateInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + PipelineMultisampleStateCreateInfo & + setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT { rasterizationSamples = rasterizationSamples_; return *this; } - PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineMultisampleStateCreateInfo & + setSampleShadingEnable( 
VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT { sampleShadingEnable = sampleShadingEnable_; return *this; @@ -38587,51 +44331,50 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ ) VULKAN_HPP_NOEXCEPT + PipelineMultisampleStateCreateInfo & + setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT { pSampleMask = pSampleMask_; return *this; } - PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineMultisampleStateCreateInfo & + setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT { alphaToCoverageEnable = alphaToCoverageEnable_; return *this; } - PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineMultisampleStateCreateInfo & + setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT { alphaToOneEnable = alphaToOneEnable_; return *this; } - operator VkPipelineMultisampleStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineMultisampleStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default; #else - bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( rasterizationSamples == rhs.rasterizationSamples ) - && ( sampleShadingEnable == rhs.sampleShadingEnable ) - && ( minSampleShading == rhs.minSampleShading ) - && ( pSampleMask == rhs.pSampleMask ) - && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) - && ( alphaToOneEnable == rhs.alphaToOneEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( rasterizationSamples == rhs.rasterizationSamples ) && + ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) && + ( pSampleMask == rhs.pSampleMask ) && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) && + ( alphaToOneEnable == rhs.alphaToOneEnable ); } - bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -38639,27 +44382,30 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; - const void* pNext = {}; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {}; - float minSampleShading = {}; - const 
VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask = {}; - VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {}; - VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {}; + float minSampleShading = {}; + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {}; }; - static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct StencilOpState { - VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, - VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, - VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, - VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, - uint32_t compareMask_ = {}, - uint32_t writeMask_ = {}, - uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + uint32_t compareMask_ = {}, + uint32_t writeMask_ = {}, + uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT : failOp( failOp_ ) , passOp( passOp_ ) , depthFailOp( depthFailOp_ ) @@ -38674,9 +44420,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - StencilOpState& operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT + StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -38722,60 +44468,57 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkStencilOpState const&() const VULKAN_HPP_NOEXCEPT + operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( StencilOpState const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StencilOpState const & ) const = default; #else - bool operator==( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( failOp == rhs.failOp ) - && ( passOp == rhs.passOp ) - && ( depthFailOp == rhs.depthFailOp ) - && ( compareOp == rhs.compareOp ) - && ( compareMask == rhs.compareMask ) - && ( writeMask == rhs.writeMask ) - && ( reference == rhs.reference ); + return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( 
depthFailOp == rhs.depthFailOp ) && + ( compareOp == rhs.compareOp ) && ( compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) && + ( reference == rhs.reference ); } - bool operator!=( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; - VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; - VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; - uint32_t compareMask = {}; - uint32_t writeMask = {}; - uint32_t reference = {}; + VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + uint32_t compareMask = {}; + uint32_t writeMask = {}; + uint32_t reference = {}; }; static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PipelineDepthStencilStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, - VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, - VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, - VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, - VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, - float minDepthBounds_ = {}, - float maxDepthBounds_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, + VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, + VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, + float minDepthBounds_ = {}, + float maxDepthBounds_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , depthTestEnable( depthTestEnable_ ) , depthWriteEnable( depthWriteEnable_ ) @@ -38788,9 +44531,12 @@ namespace VULKAN_HPP_NAMESPACE , maxDepthBounds( maxDepthBounds_ ) {} - PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & + operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineDepthStencilStateCreateInfo ) - offsetof( PipelineDepthStencilStateCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineDepthStencilStateCreateInfo ) - offsetof( PipelineDepthStencilStateCreateInfo, pNext ) ); return *this; } @@ -38799,61 +44545,70 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & 
rhs ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & + operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineDepthStencilStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineDepthStencilStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & + setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT { depthTestEnable = depthTestEnable_; return *this; } - PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & + setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT { depthWriteEnable = depthWriteEnable_; return *this; } - PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & + setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT { depthCompareOp = depthCompareOp_; return *this; } - PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & + setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT { depthBoundsTestEnable = depthBoundsTestEnable_; return *this; } - PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & + setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT { stencilTestEnable = stencilTestEnable_; return *this; } - PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & + setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT { front = front_; return *this; } - PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & + setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT { back = back_; return *this; @@ -38871,36 +44626,29 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPipelineDepthStencilStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineDepthStencilStateCreateInfo const& ) const = default; +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default; #else - bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( depthTestEnable == rhs.depthTestEnable ) - && ( depthWriteEnable == rhs.depthWriteEnable ) - && ( depthCompareOp == rhs.depthCompareOp ) - && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) - && ( stencilTestEnable == rhs.stencilTestEnable ) - && ( front == rhs.front ) - && ( back == rhs.back ) - && ( minDepthBounds == rhs.minDepthBounds ) - && ( maxDepthBounds == rhs.maxDepthBounds ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( depthTestEnable == rhs.depthTestEnable ) && ( depthWriteEnable == rhs.depthWriteEnable ) && + ( depthCompareOp == rhs.depthCompareOp ) && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) && + ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) && ( back == rhs.back ) && + ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds ); } - bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -38908,31 +44656,34 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {}; - VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; - VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {}; - VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {}; - VULKAN_HPP_NAMESPACE::StencilOpState front = {}; - VULKAN_HPP_NAMESPACE::StencilOpState back = {}; - float minDepthBounds = {}; - float maxDepthBounds = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {}; + VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {}; + VULKAN_HPP_NAMESPACE::StencilOpState front = {}; + VULKAN_HPP_NAMESPACE::StencilOpState back = {}; + float minDepthBounds = {}; + float maxDepthBounds = {}; }; - static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineColorBlendAttachmentState { - VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, - VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, - VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, - VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, - VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, - VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, - VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, - VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( + VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT : blendEnable( blendEnable_ ) , srcColorBlendFactor( srcColorBlendFactor_ ) , dstColorBlendFactor( dstColorBlendFactor_ ) @@ -38948,9 +44699,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -38960,100 +44711,106 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineColorBlendAttachmentState & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAttachmentState & + setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT { srcColorBlendFactor = srcColorBlendFactor_; return *this; } - PipelineColorBlendAttachmentState & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAttachmentState & + setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT { dstColorBlendFactor = dstColorBlendFactor_; return *this; } - PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAttachmentState & + setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT { colorBlendOp = colorBlendOp_; return *this; } - PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAttachmentState & + setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT { srcAlphaBlendFactor = srcAlphaBlendFactor_; return *this; } - PipelineColorBlendAttachmentState & 
setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAttachmentState & + setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT { dstAlphaBlendFactor = dstAlphaBlendFactor_; return *this; } - PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAttachmentState & + setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT { alphaBlendOp = alphaBlendOp_; return *this; } - PipelineColorBlendAttachmentState & setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAttachmentState & + setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT { colorWriteMask = colorWriteMask_; return *this; } - operator VkPipelineColorBlendAttachmentState const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineColorBlendAttachmentState const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default; #else - bool operator==( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( blendEnable == rhs.blendEnable ) - && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) - && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) - && ( colorBlendOp == rhs.colorBlendOp ) - && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) - && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) - && ( alphaBlendOp == rhs.alphaBlendOp ) - && ( colorWriteMask == rhs.colorWriteMask ); + return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && + ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) && + ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && + ( alphaBlendOp == rhs.alphaBlendOp ) && ( colorWriteMask == rhs.colorWriteMask ); } - bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {}; - VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; - VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; - VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; - VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; - VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; - VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; - VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {}; + VULKAN_HPP_NAMESPACE::BlendFactor 
srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {}; }; - static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineColorBlendStateCreateInfo { - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, - VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ = {}, - std::array const& blendConstants_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, + VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {}, + std::array const & blendConstants_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , logicOpEnable( logicOpEnable_ ) , logicOp( logicOp_ ) @@ -39064,7 +44821,9 @@ namespace VULKAN_HPP_NAMESPACE PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineColorBlendStateCreateInfo ) - offsetof( PipelineColorBlendStateCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineColorBlendStateCreateInfo ) - offsetof( PipelineColorBlendStateCreateInfo, pNext ) ); return *this; } @@ -39073,25 +44832,27 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineColorBlendStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineColorBlendStateCreateInfo & setLogicOpEnable( 
VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendStateCreateInfo & + setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT { logicOpEnable = logicOpEnable_; return *this; @@ -39109,67 +44870,67 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineColorBlendStateCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendStateCreateInfo & setPAttachments( + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; } - PipelineColorBlendStateCreateInfo & setBlendConstants( std::array blendConstants_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendStateCreateInfo & setBlendConstants( std::array blendConstants_ ) VULKAN_HPP_NOEXCEPT { blendConstants = blendConstants_; return *this; } - operator VkPipelineColorBlendStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineColorBlendStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default; #else - bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( logicOpEnable == rhs.logicOpEnable ) - && ( logicOp == rhs.logicOp ) - && ( attachmentCount == rhs.attachmentCount ) - && ( pAttachments == rhs.pAttachments ) - && ( blendConstants == rhs.blendConstants ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( logicOpEnable == rhs.logicOpEnable ) && ( logicOp == rhs.logicOp ) && + ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && + ( blendConstants == rhs.blendConstants ); } - bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {}; - VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D blendConstants = {}; + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {}; + VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D blendConstants = {}; }; - 
static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineDynamicStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, - uint32_t dynamicStateCount_ = {}, - const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, + uint32_t dynamicStateCount_ = {}, + const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , dynamicStateCount( dynamicStateCount_ ) , pDynamicStates( pDynamicStates_ ) @@ -39177,7 +44938,9 @@ namespace VULKAN_HPP_NAMESPACE PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineDynamicStateCreateInfo ) - offsetof( PipelineDynamicStateCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineDynamicStateCreateInfo ) - offsetof( PipelineDynamicStateCreateInfo, pNext ) ); return *this; } @@ -39186,19 +44949,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineDynamicStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineDynamicStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -39210,69 +44974,70 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ ) VULKAN_HPP_NOEXCEPT + PipelineDynamicStateCreateInfo & + setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT { pDynamicStates = pDynamicStates_; return *this; } - operator VkPipelineDynamicStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineDynamicStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default; #else - bool operator==( PipelineDynamicStateCreateInfo const& rhs ) 
const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( dynamicStateCount == rhs.dynamicStateCount ) - && ( pDynamicStates == rhs.pDynamicStates ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( dynamicStateCount == rhs.dynamicStateCount ) && ( pDynamicStates == rhs.pDynamicStates ); } - bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {}; - uint32_t dynamicStateCount = {}; - const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates = {}; + uint32_t dynamicStateCount = {}; + const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {}; }; - static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct GraphicsPipelineCreateInfo { - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t subpass_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const 
VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , stageCount( stageCount_ ) , pStages( pStages_ ) @@ -39294,7 +45059,8 @@ namespace VULKAN_HPP_NAMESPACE GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( GraphicsPipelineCreateInfo ) - offsetof( GraphicsPipelineCreateInfo, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( GraphicsPipelineCreateInfo ) - offsetof( GraphicsPipelineCreateInfo, pNext ) ); return *this; } @@ -39303,13 +45069,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - GraphicsPipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -39327,61 +45093,71 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT { pStages = pStages_; return *this; } - GraphicsPipelineCreateInfo & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPVertexInputState( + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT { pVertexInputState = pVertexInputState_; return *this; } - GraphicsPipelineCreateInfo & setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPInputAssemblyState( + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT { pInputAssemblyState = pInputAssemblyState_; return *this; } - GraphicsPipelineCreateInfo & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPTessellationState( + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT { pTessellationState = pTessellationState_; return *this; } - GraphicsPipelineCreateInfo & setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & 
setPViewportState( + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT { pViewportState = pViewportState_; return *this; } - GraphicsPipelineCreateInfo & setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPRasterizationState( + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT { pRasterizationState = pRasterizationState_; return *this; } - GraphicsPipelineCreateInfo & setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPMultisampleState( + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT { pMultisampleState = pMultisampleState_; return *this; } - GraphicsPipelineCreateInfo & setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPDepthStencilState( + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT { pDepthStencilState = pDepthStencilState_; return *this; } - GraphicsPipelineCreateInfo & setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPColorBlendState( + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT { pColorBlendState = pColorBlendState_; return *this; } - GraphicsPipelineCreateInfo & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPDynamicState( + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT { pDynamicState = pDynamicState_; return *this; @@ -39405,7 +45181,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; @@ -39417,78 +45194,71 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkGraphicsPipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( GraphicsPipelineCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default; #else - bool operator==( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stageCount == rhs.stageCount ) - && ( pStages == rhs.pStages ) - && ( pVertexInputState == rhs.pVertexInputState ) - && ( pInputAssemblyState == rhs.pInputAssemblyState ) - 
&& ( pTessellationState == rhs.pTessellationState ) - && ( pViewportState == rhs.pViewportState ) - && ( pRasterizationState == rhs.pRasterizationState ) - && ( pMultisampleState == rhs.pMultisampleState ) - && ( pDepthStencilState == rhs.pDepthStencilState ) - && ( pColorBlendState == rhs.pColorBlendState ) - && ( pDynamicState == rhs.pDynamicState ) - && ( layout == rhs.layout ) - && ( renderPass == rhs.renderPass ) - && ( subpass == rhs.subpass ) - && ( basePipelineHandle == rhs.basePipelineHandle ) - && ( basePipelineIndex == rhs.basePipelineIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) && + ( pTessellationState == rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) && + ( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) && + ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) && + ( pDynamicState == rhs.pDynamicState ) && ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) && + ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); } - bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - uint32_t stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {}; - const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState = {}; - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {}; - const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState = {}; - const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState = {}; - const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState = {}; - const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState = {}; - const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState = {}; - const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - uint32_t subpass = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {}; + const 
VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {}; + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState = {}; + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState = {}; + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState = {}; + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; }; - static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct GraphicsShaderGroupCreateInfoNV { - VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {} ) VULKAN_HPP_NOEXCEPT : stageCount( stageCount_ ) , pStages( pStages_ ) , pVertexInputState( pVertexInputState_ ) @@ -39497,7 +45267,9 @@ namespace VULKAN_HPP_NAMESPACE GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( GraphicsShaderGroupCreateInfoNV ) - offsetof( GraphicsShaderGroupCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( GraphicsShaderGroupCreateInfoNV ) - offsetof( GraphicsShaderGroupCreateInfoNV, pNext ) ); return *this; } @@ -39506,13 +45278,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - GraphicsShaderGroupCreateInfoNV& operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - GraphicsShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -39524,94 +45296,103 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GraphicsShaderGroupCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT + GraphicsShaderGroupCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT { pStages = pStages_; return *this; } - GraphicsShaderGroupCreateInfoNV & setPVertexInputState( const 
VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + GraphicsShaderGroupCreateInfoNV & setPVertexInputState( + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT { pVertexInputState = pVertexInputState_; return *this; } - GraphicsShaderGroupCreateInfoNV & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT + GraphicsShaderGroupCreateInfoNV & setPTessellationState( + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT { pTessellationState = pTessellationState_; return *this; } - operator VkGraphicsShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( GraphicsShaderGroupCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default; #else - bool operator==( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stageCount == rhs.stageCount ) - && ( pStages == rhs.pStages ) - && ( pVertexInputState == rhs.pVertexInputState ) - && ( pTessellationState == rhs.pTessellationState ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) && + ( pStages == rhs.pStages ) && ( pVertexInputState == rhs.pVertexInputState ) && + ( pTessellationState == rhs.pTessellationState ); } - bool operator!=( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; - const void* pNext = {}; - uint32_t stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {}; - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; + const void * pNext = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; }; - static_assert( sizeof( GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct GraphicsPipelineShaderGroupsCreateInfoNV { - VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( uint32_t groupCount_ = {}, - const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ = {}, - uint32_t pipelineCount_ = {}, - const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t pipelineCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {} ) VULKAN_HPP_NOEXCEPT : groupCount( groupCount_ ) , pGroups( pGroups_ ) , pipelineCount( pipelineCount_ ) , pPipelines( pPipelines_ ) {} - GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineShaderGroupsCreateInfoNV & + operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) - offsetof( GraphicsPipelineShaderGroupsCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) - + offsetof( GraphicsPipelineShaderGroupsCreateInfoNV, pNext ) ); return *this; } - GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - GraphicsPipelineShaderGroupsCreateInfoNV& operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineShaderGroupsCreateInfoNV & + operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -39623,7 +45404,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineShaderGroupsCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT { pGroups = pGroups_; return *this; @@ -39635,56 +45417,55 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineShaderGroupsCreateInfoNV & + setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT { pPipelines = pPipelines_; return *this; } - operator VkGraphicsPipelineShaderGroupsCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( 
GraphicsPipelineShaderGroupsCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default; #else - bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( groupCount == rhs.groupCount ) - && ( pGroups == rhs.pGroups ) - && ( pipelineCount == rhs.pipelineCount ) - && ( pPipelines == rhs.pPipelines ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) && + ( pGroups == rhs.pGroups ) && ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines ); } - bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; - const void* pNext = {}; - uint32_t groupCount = {}; - const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups = {}; - uint32_t pipelineCount = {}; - const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + const void * pNext = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups = {}; + uint32_t pipelineCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines = {}; }; - static_assert( sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) == + sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct XYColorEXT { - VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, - float y_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT : x( x_ ) , y( y_ ) {} @@ -39694,9 +45475,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - XYColorEXT& operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT + XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -39712,26 +45493,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkXYColorEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( XYColorEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( XYColorEXT const & ) const = default; #else - bool operator==( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( x == rhs.x ) - && ( y == rhs.y ); + return ( x == rhs.x ) && ( y == rhs.y ); } - bool operator!=( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -39746,13 +45526,13 @@ namespace VULKAN_HPP_NAMESPACE struct HdrMetadataEXT { - VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, - VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, - float maxLuminance_ = {}, - float minLuminance_ = {}, - float maxContentLightLevel_ = {}, + VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, + float maxLuminance_ = {}, + float minLuminance_ = {}, + float maxContentLightLevel_ = {}, float maxFrameAverageLightLevel_ = {} ) VULKAN_HPP_NOEXCEPT : displayPrimaryRed( displayPrimaryRed_ ) , displayPrimaryGreen( displayPrimaryGreen_ ) @@ -39775,31 +45555,34 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - HdrMetadataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT + HdrMetadataEXT & + setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT { displayPrimaryRed = displayPrimaryRed_; return *this; } - HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT + HdrMetadataEXT & + setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT 
{ displayPrimaryGreen = displayPrimaryGreen_; return *this; } - HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT + HdrMetadataEXT & + setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT { displayPrimaryBlue = displayPrimaryBlue_; return *this; @@ -39835,63 +45618,59 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkHdrMetadataEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( HdrMetadataEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HdrMetadataEXT const & ) const = default; #else - bool operator==( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayPrimaryRed == rhs.displayPrimaryRed ) - && ( displayPrimaryGreen == rhs.displayPrimaryGreen ) - && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) - && ( whitePoint == rhs.whitePoint ) - && ( maxLuminance == rhs.maxLuminance ) - && ( minLuminance == rhs.minLuminance ) - && ( maxContentLightLevel == rhs.maxContentLightLevel ) - && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) && + ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) && + ( whitePoint == rhs.whitePoint ) && ( maxLuminance == rhs.maxLuminance ) && + ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) && + ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); } - bool operator!=( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {}; - float maxLuminance = {}; - float minLuminance = {}; - float maxContentLightLevel = {}; - float maxFrameAverageLightLevel = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {}; + float maxLuminance = {}; + float minLuminance = {}; + float maxContentLightLevel = {}; + float maxFrameAverageLightLevel = {}; }; static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct HeadlessSurfaceCreateInfoEXT { - VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) + VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {} ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) {} HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( HeadlessSurfaceCreateInfoEXT ) - offsetof( HeadlessSurfaceCreateInfoEXT, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( HeadlessSurfaceCreateInfoEXT ) - offsetof( HeadlessSurfaceCreateInfoEXT, pNext ) ); return *this; } @@ -39900,63 +45679,64 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - HeadlessSurfaceCreateInfoEXT& operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - HeadlessSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + HeadlessSurfaceCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - operator VkHeadlessSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( HeadlessSurfaceCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default; #else - bool operator==( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); } - bool operator!=( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {}; }; - static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_IOS_MVK struct IOSSurfaceCreateInfoMVK { VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, - const void* pView_ = {} ) VULKAN_HPP_NOEXCEPT + const void * pView_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pView( pView_ ) {} @@ -39972,13 +45752,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - IOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -39990,55 +45770,54 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - IOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT + IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT { pView = pView_; return *this; } - operator VkIOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT + operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IOSSurfaceCreateInfoMVK const& ) const = default; -#else - bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default; +# else + bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pView == rhs.pView ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); } - bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {}; - const void* pView = {}; + const void * pView = {}; }; - static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" ); + static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #endif /*VK_USE_PLATFORM_IOS_MVK*/ struct ImageBlit { - VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - std::array const& srcOffsets_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - std::array const& dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const & srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT : srcSubresource( srcSubresource_ ) , srcOffsets( srcOffsets_ ) , dstSubresource( dstSubresource_ ) @@ -40050,68 +45829,68 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageBlit& operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT + ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + ImageBlit & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; } - ImageBlit & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + ImageBlit & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT { srcOffsets = srcOffsets_; return *this; } - ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + ImageBlit & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; } - ImageBlit & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + ImageBlit & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT { dstOffsets = dstOffsets_; return *this; } - operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT + operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageBlit &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageBlit const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageBlit const & ) const = default; #else - bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( srcSubresource == rhs.srcSubresource ) - && ( srcOffsets == rhs.srcOffsets ) - && ( dstSubresource == rhs.dstSubresource ) - && ( dstOffsets == rhs.dstOffsets ); + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets ); } - bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + 
VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; }; static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); @@ -40119,10 +45898,10 @@ namespace VULKAN_HPP_NAMESPACE struct ImageCopy { VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT : srcSubresource( srcSubresource_ ) , srcOffset( srcOffset_ ) , dstSubresource( dstSubresource_ ) @@ -40135,13 +45914,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageCopy& operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + ImageCopy & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; @@ -40153,7 +45933,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + ImageCopy & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; @@ -40171,29 +45952,26 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT + operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageCopy &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageCopy const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCopy const & ) const = default; #else - bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( srcSubresource == rhs.srcSubresource ) - && ( srcOffset == rhs.srcOffset ) - && ( dstSubresource == rhs.dstSubresource ) - && ( dstOffset == rhs.dstOffset ) - && ( extent == rhs.extent ); + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); } - bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -40201,29 +45979,31 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::Offset3D 
srcOffset = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; }; static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct ImageCreateInfo { - VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, - uint32_t mipLevels_ = {}, - uint32_t arrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t* pQueueFamilyIndices_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ImageCreateInfo( + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + uint32_t mipLevels_ = {}, + uint32_t arrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) , imageType( imageType_ ) , format( format_ ) @@ -40250,13 +46030,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageCreateInfo& operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -40328,7 +46108,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImageCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; @@ -40340,69 +46120,60 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageCreateInfo &() 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCreateInfo const & ) const = default; #else - bool operator==( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( imageType == rhs.imageType ) - && ( format == rhs.format ) - && ( extent == rhs.extent ) - && ( mipLevels == rhs.mipLevels ) - && ( arrayLayers == rhs.arrayLayers ) - && ( samples == rhs.samples ) - && ( tiling == rhs.tiling ) - && ( usage == rhs.usage ) - && ( sharingMode == rhs.sharingMode ) - && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) - && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) - && ( initialLayout == rhs.initialLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( imageType == rhs.imageType ) && ( format == rhs.format ) && ( extent == rhs.extent ) && + ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) && + ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) && + ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( initialLayout == rhs.initialLayout ); } - bool operator!=( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; - uint32_t mipLevels = {}; - uint32_t arrayLayers = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; - uint32_t queueFamilyIndexCount = {}; - const uint32_t* pQueueFamilyIndices = {}; - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + uint32_t mipLevels = {}; + uint32_t arrayLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * 
pQueueFamilyIndices = {}; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SubresourceLayout { - VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, + VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT : offset( offset_ ) @@ -40417,150 +46188,164 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SubresourceLayout& operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT + SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSubresourceLayout const&() const VULKAN_HPP_NOEXCEPT + operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubresourceLayout const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubresourceLayout const & ) const = default; #else - bool operator==( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( offset == rhs.offset ) - && ( size == rhs.size ) - && ( rowPitch == rhs.rowPitch ) - && ( arrayPitch == rhs.arrayPitch ) - && ( depthPitch == rhs.depthPitch ); + return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) && + ( arrayPitch == rhs.arrayPitch ) && ( depthPitch == rhs.depthPitch ); } - bool operator!=( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {}; VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {}; VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {}; }; - static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" ); + static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ImageDrmFormatModifierExplicitCreateInfoEXT { - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {}, - uint32_t drmFormatModifierPlaneCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( + uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {} ) VULKAN_HPP_NOEXCEPT : drmFormatModifier( drmFormatModifier_ ) , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) , pPlaneLayouts( pPlaneLayouts_ ) {} - ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierExplicitCreateInfoEXT & + operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) - offsetof( ImageDrmFormatModifierExplicitCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) - + offsetof( ImageDrmFormatModifierExplicitCreateInfoEXT, pNext ) ); return *this; } - ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - ImageDrmFormatModifierExplicitCreateInfoEXT& operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierExplicitCreateInfoEXT & + operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierExplicitCreateInfoEXT & + setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT { drmFormatModifier = drmFormatModifier_; return *this; } - ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierExplicitCreateInfoEXT & + setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT { drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; return *this; } - ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierExplicitCreateInfoEXT & + setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT { pPlaneLayouts = pPlaneLayouts_; return *this; } - operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + 
return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default; #else - bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifier == rhs.drmFormatModifier ) - && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) - && ( pPlaneLayouts == rhs.pPlaneLayouts ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && + ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && + ( pPlaneLayouts == rhs.pPlaneLayouts ); } - bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; - const void* pNext = {}; - uint64_t drmFormatModifier = {}; - uint32_t drmFormatModifierPlaneCount = {}; - const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {}; }; - static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == + sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct ImageDrmFormatModifierListCreateInfoEXT { - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {}, - const uint64_t* pDrmFormatModifiers_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {}, + const uint64_t * pDrmFormatModifiers_ = {} ) VULKAN_HPP_NOEXCEPT : drmFormatModifierCount( drmFormatModifierCount_ ) , pDrmFormatModifiers( pDrmFormatModifiers_ ) {} - ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierListCreateInfoEXT & + operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImageDrmFormatModifierListCreateInfoEXT ) - offsetof( ImageDrmFormatModifierListCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImageDrmFormatModifierListCreateInfoEXT ) - + offsetof( ImageDrmFormatModifierListCreateInfoEXT, pNext ) ); return *this; } @@ -40569,52 +46354,54 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageDrmFormatModifierListCreateInfoEXT& operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierListCreateInfoEXT & + operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierListCreateInfoEXT & + setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT { drmFormatModifierCount = drmFormatModifierCount_; return *this; } - ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierListCreateInfoEXT & + setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT { pDrmFormatModifiers = pDrmFormatModifiers_; return *this; } - operator VkImageDrmFormatModifierListCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default; #else - bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) - && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( 
pDrmFormatModifiers == rhs.pDrmFormatModifiers ); } - bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -40622,12 +46409,15 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; - const void* pNext = {}; - uint32_t drmFormatModifierCount = {}; - const uint64_t* pDrmFormatModifiers = {}; + const void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + const uint64_t * pDrmFormatModifiers = {}; }; - static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == + sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ImageDrmFormatModifierPropertiesEXT { @@ -40635,9 +46425,12 @@ namespace VULKAN_HPP_NAMESPACE : drmFormatModifier( drmFormatModifier_ ) {} - ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierPropertiesEXT & + operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImageDrmFormatModifierPropertiesEXT ) - offsetof( ImageDrmFormatModifierPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImageDrmFormatModifierPropertiesEXT ) - offsetof( ImageDrmFormatModifierPropertiesEXT, pNext ) ); return *this; } @@ -40646,50 +46439,52 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageDrmFormatModifierPropertiesEXT& operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierPropertiesEXT & + operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkImageDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageDrmFormatModifierPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default; #else - bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifier == rhs.drmFormatModifier ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ); } - bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; - void* pNext = {}; - uint64_t drmFormatModifier = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; + void * pNext = {}; + uint64_t drmFormatModifier = {}; }; - static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ImageFormatListCreateInfo { - VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, - const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT : viewFormatCount( viewFormatCount_ ) , pViewFormats( pViewFormats_ ) {} @@ -40705,13 +46500,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageFormatListCreateInfo& operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageFormatListCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -40723,51 +46518,53 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT + ImageFormatListCreateInfo & + setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT { pViewFormats = pViewFormats_; return *this; } - operator VkImageFormatListCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageFormatListCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatListCreateInfo const & ) const = default; #else - bool operator==( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( viewFormatCount == rhs.viewFormatCount ) - && ( pViewFormats == rhs.pViewFormats ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) && + ( pViewFormats == rhs.pViewFormats ); } - bool operator!=( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif 
public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo; - const void* pNext = {}; - uint32_t viewFormatCount = {}; - const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo; + const void * pNext = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; }; - static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ImageFormatProperties2 { - VULKAN_HPP_CONSTEXPR ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {} ) VULKAN_HPP_NOEXCEPT : imageFormatProperties( imageFormatProperties_ ) {} @@ -40782,52 +46579,51 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageFormatProperties2& operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT + operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageFormatProperties2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatProperties2 const & ) const = default; #else - bool operator==( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( imageFormatProperties == rhs.imageFormatProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties ); } - bool operator!=( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2; + void * pNext = {}; VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; }; - static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ImageSubresourceRange { - VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - uint32_t baseMipLevel_ = {}, - uint32_t levelCount_ = {}, - uint32_t baseArrayLayer_ = {}, + VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t baseMipLevel_ = {}, + uint32_t levelCount_ = {}, + uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT : aspectMask( aspectMask_ ) , baseMipLevel( baseMipLevel_ ) @@ -40841,9 +46637,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT + ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -40877,54 +46673,54 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT + operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageSubresourceRange const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresourceRange const & ) const = default; #else - bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( aspectMask == rhs.aspectMask ) - && ( baseMipLevel == rhs.baseMipLevel ) - && ( levelCount == rhs.levelCount ) - && ( baseArrayLayer == rhs.baseArrayLayer ) - && ( layerCount == rhs.layerCount ); + return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) && + ( levelCount == rhs.levelCount ) && ( baseArrayLayer == rhs.baseArrayLayer ) && + ( layerCount == rhs.layerCount ); } - bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - uint32_t baseMipLevel = {}; - uint32_t levelCount = {}; - uint32_t baseArrayLayer = {}; - uint32_t layerCount = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t baseMipLevel = {}; + uint32_t levelCount = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; }; - static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" ); + static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ImageMemoryBarrier { - VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - uint32_t srcQueueFamilyIndex_ = {}, - uint32_t dstQueueFamilyIndex_ = {}, - VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT : srcAccessMask( srcAccessMask_ ) , dstAccessMask( dstAccessMask_ ) , oldLayout( oldLayout_ ) @@ -40946,13 +46742,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -41000,58 +46796,55 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + ImageMemoryBarrier & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return *this; } - operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT + operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageMemoryBarrier const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryBarrier const & ) const = default; #else - bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( oldLayout == rhs.oldLayout ) - && ( newLayout == rhs.newLayout ) - && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) - && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) - && ( image == rhs.image ) - && ( subresourceRange == rhs.subresourceRange ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) && + ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == 
rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && + ( subresourceRange == rhs.subresourceRange ); } - bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - uint32_t srcQueueFamilyIndex = {}; - uint32_t dstQueueFamilyIndex = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; }; - static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" ); + static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ImageMemoryRequirementsInfo2 @@ -41062,7 +46855,8 @@ namespace VULKAN_HPP_NAMESPACE ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImageMemoryRequirementsInfo2 ) - offsetof( ImageMemoryRequirementsInfo2, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( ImageMemoryRequirementsInfo2 ) - offsetof( ImageMemoryRequirementsInfo2, pNext ) ); return *this; } @@ -41071,13 +46865,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -41089,27 +46883,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImageMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT + operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageMemoryRequirementsInfo2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default; #else - bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); } - bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -41117,24 +46909,29 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; }; - static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #ifdef VK_USE_PLATFORM_FUCHSIA struct ImagePipeSurfaceCreateInfoFUCHSIA { - VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, - zx_handle_t imagePipeHandle_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, + zx_handle_t imagePipeHandle_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , imagePipeHandle( imagePipeHandle_ ) {} ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) - offsetof( ImagePipeSurfaceCreateInfoFUCHSIA, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) - offsetof( ImagePipeSurfaceCreateInfoFUCHSIA, pNext ) ); return *this; } @@ -41143,19 +46940,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImagePipeSurfaceCreateInfoFUCHSIA& operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + ImagePipeSurfaceCreateInfoFUCHSIA & + setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -41167,52 +46965,56 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const VULKAN_HPP_NOEXCEPT + operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const& ) const = default; -#else - bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & ) const = default; +# else + bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( imagePipeHandle == rhs.imagePipeHandle ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( imagePipeHandle == rhs.imagePipeHandle ); } - bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + const void * pNext = {}; 
VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {}; - zx_handle_t imagePipeHandle = {}; + zx_handle_t imagePipeHandle = {}; }; - static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ struct ImagePlaneMemoryRequirementsInfo { - VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT : planeAspect( planeAspect_ ) {} ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImagePlaneMemoryRequirementsInfo ) - offsetof( ImagePlaneMemoryRequirementsInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImagePlaneMemoryRequirementsInfo ) - offsetof( ImagePlaneMemoryRequirementsInfo, pNext ) ); return *this; } @@ -41221,65 +47023,66 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImagePlaneMemoryRequirementsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + ImagePlaneMemoryRequirementsInfo & + setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT { planeAspect = planeAspect_; return *this; } - operator VkImagePlaneMemoryRequirementsInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImagePlaneMemoryRequirementsInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default; #else - bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( planeAspect == rhs.planeAspect ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); } - bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; }; - static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ImageResolve { VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT : srcSubresource( srcSubresource_ ) , srcOffset( srcOffset_ ) , dstSubresource( dstSubresource_ ) @@ -41292,13 +47095,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageResolve& operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT + ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + ImageResolve & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; @@ -41310,7 +47114,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + ImageResolve & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; @@ -41328,29 +47133,26 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT + operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageResolve &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageResolve const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageResolve const & ) const = default; #else - bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( srcSubresource == rhs.srcSubresource ) - && ( srcOffset == rhs.srcOffset ) - && ( dstSubresource == rhs.dstSubresource ) - && ( 
dstOffset == rhs.dstOffset ) - && ( extent == rhs.extent ); + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); } - bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -41358,23 +47160,26 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; }; static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct ImageSparseMemoryRequirementsInfo2 { - VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT : image( image_ ) {} ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImageSparseMemoryRequirementsInfo2 ) - offsetof( ImageSparseMemoryRequirementsInfo2, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImageSparseMemoryRequirementsInfo2 ) - offsetof( ImageSparseMemoryRequirementsInfo2, pNext ) ); return *this; } @@ -41383,13 +47188,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + ImageSparseMemoryRequirementsInfo2 & + operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageSparseMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -41401,27 +47207,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImageSparseMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT + operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageSparseMemoryRequirementsInfo2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default; #else - bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); } - 
bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -41429,21 +47233,25 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; }; - static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ImageStencilUsageCreateInfo { - VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {} ) VULKAN_HPP_NOEXCEPT : stencilUsage( stencilUsage_ ) {} ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImageStencilUsageCreateInfo ) - offsetof( ImageStencilUsageCreateInfo, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( ImageStencilUsageCreateInfo ) - offsetof( ImageStencilUsageCreateInfo, pNext ) ); return *this; } @@ -41452,67 +47260,70 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageStencilUsageCreateInfo& operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageStencilUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT + ImageStencilUsageCreateInfo & + setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT { stencilUsage = stencilUsage_; return *this; } - operator VkImageStencilUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageStencilUsageCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default; #else - bool operator==( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stencilUsage == rhs.stencilUsage ); + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( stencilUsage == rhs.stencilUsage ); } - bool operator!=( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {}; }; - static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ImageSwapchainCreateInfoKHR { - VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {} ) VULKAN_HPP_NOEXCEPT : swapchain( swapchain_ ) {} ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImageSwapchainCreateInfoKHR ) - offsetof( ImageSwapchainCreateInfoKHR, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( ImageSwapchainCreateInfoKHR ) - offsetof( ImageSwapchainCreateInfoKHR, pNext ) ); return *this; } @@ -41521,13 +47332,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -41539,49 +47350,51 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImageSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageSwapchainCreateInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default; #else - bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchain == rhs.swapchain ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ); } - bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( 
ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; }; - static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ImageViewASTCDecodeModeEXT { - VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( + VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT : decodeMode( decodeMode_ ) {} ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImageViewASTCDecodeModeEXT ) - offsetof( ImageViewASTCDecodeModeEXT, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( ImageViewASTCDecodeModeEXT ) - offsetof( ImageViewASTCDecodeModeEXT, pNext ) ); return *this; } @@ -41590,13 +47403,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageViewASTCDecodeModeEXT& operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageViewASTCDecodeModeEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -41608,51 +47421,54 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImageViewASTCDecodeModeEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageViewASTCDecodeModeEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default; #else - bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( decodeMode == rhs.decodeMode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode ); } - bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined; }; - static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ImageViewAddressPropertiesNVX { - VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT : deviceAddress( deviceAddress_ ) , size( size_ ) {} ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImageViewAddressPropertiesNVX ) - offsetof( ImageViewAddressPropertiesNVX, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImageViewAddressPropertiesNVX ) - offsetof( ImageViewAddressPropertiesNVX, pNext ) ); return *this; } @@ -41661,56 +47477,57 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageViewAddressPropertiesNVX& operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkImageViewAddressPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT + operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageViewAddressPropertiesNVX const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default; #else - bool operator==( ImageViewAddressPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceAddress == rhs.deviceAddress ) - && ( size == rhs.size ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && + ( size == rhs.size ); } - bool operator!=( ImageViewAddressPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceAddress 
deviceAddress = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; - static_assert( sizeof( ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ImageViewCreateInfo { - VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , image( image_ ) , viewType( viewType_ ) @@ -41730,13 +47547,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -41766,67 +47583,66 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + ImageViewCreateInfo & + setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT { components = components_; return *this; } - ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + ImageViewCreateInfo & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return *this; } - operator VkImageViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageViewCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
ImageViewCreateInfo const & ) const = default; #else - bool operator==( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( image == rhs.image ) - && ( viewType == rhs.viewType ) - && ( format == rhs.format ) - && ( components == rhs.components ) - && ( subresourceRange == rhs.subresourceRange ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && + ( viewType == rhs.viewType ) && ( format == rhs.format ) && ( components == rhs.components ) && + ( subresourceRange == rhs.subresourceRange ); } - bool operator!=( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; }; - static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ImageViewHandleInfoNVX { - VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - VULKAN_HPP_NAMESPACE::Sampler sampler_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + VULKAN_HPP_NAMESPACE::Sampler sampler_ = {} ) VULKAN_HPP_NOEXCEPT : imageView( imageView_ ) , descriptorType( descriptorType_ ) , sampler( sampler_ ) @@ -41843,13 +47659,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageViewHandleInfoNVX& operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageViewHandleInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -41861,7 +47677,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + ImageViewHandleInfoNVX & + setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return *this; @@ -41873,47 +47690,46 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImageViewHandleInfoNVX const&() const VULKAN_HPP_NOEXCEPT + operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageViewHandleInfoNVX const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewHandleInfoNVX const & ) const = default; #else - bool operator==( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( imageView == rhs.imageView ) - && ( descriptorType == rhs.descriptorType ) - && ( sampler == rhs.sampler ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && + ( descriptorType == rhs.descriptorType ) && ( sampler == rhs.sampler ); } - bool operator!=( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageView imageView = {}; - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - VULKAN_HPP_NAMESPACE::Sampler sampler = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + 
VULKAN_HPP_NAMESPACE::Sampler sampler = {}; }; - static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" ); + static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct ImageViewUsageCreateInfo { - VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {} ) VULKAN_HPP_NOEXCEPT : usage( usage_ ) {} @@ -41928,13 +47744,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImageViewUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -41946,27 +47762,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImageViewUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageViewUsageCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewUsageCreateInfo const & ) const = default; #else - bool operator==( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( usage == rhs.usage ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); } - bool operator!=( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -41974,22 +47788,28 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; }; - static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #ifdef VK_USE_PLATFORM_ANDROID_KHR struct ImportAndroidHardwareBufferInfoANDROID { - VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {} ) VULKAN_HPP_NOEXCEPT : buffer( buffer_ ) {} - ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + ImportAndroidHardwareBufferInfoANDROID & + operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImportAndroidHardwareBufferInfoANDROID ) - offsetof( ImportAndroidHardwareBufferInfoANDROID, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImportAndroidHardwareBufferInfoANDROID ) - + offsetof( ImportAndroidHardwareBufferInfoANDROID, pNext ) ); return *this; } @@ -41998,64 +47818,66 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + ImportAndroidHardwareBufferInfoANDROID & + operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImportAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer* buffer_ ) VULKAN_HPP_NOEXCEPT + ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } - operator VkImportAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT + operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const& ) const = default; -#else - bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default; +# else + bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); } - bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; - const void* pNext = {}; - struct AHardwareBuffer* buffer = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + const void * pNext = {}; + struct AHardwareBuffer * buffer = {}; }; - static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == 
sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ struct ImportFenceFdInfoKHR { - VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = {} ) VULKAN_HPP_NOEXCEPT : fence( fence_ ) , flags( flags_ ) @@ -42074,13 +47896,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImportFenceFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -42098,7 +47920,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + ImportFenceFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; @@ -42110,54 +47933,54 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImportFenceFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportFenceFdInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportFenceFdInfoKHR const & ) const = default; #else - bool operator==( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( flags == rhs.flags ) - && ( handleType == rhs.handleType ) - && ( fd == rhs.fd ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && + ( handleType == rhs.handleType ) && ( fd == rhs.fd ); } - bool operator!=( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR; - const void* pNext = {}; - 
VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; int fd = {}; }; - static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_WIN32_KHR struct ImportFenceWin32HandleInfoKHR { - VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT : fence( fence_ ) , flags( flags_ ) , handleType( handleType_ ) @@ -42167,7 +47990,9 @@ namespace VULKAN_HPP_NAMESPACE ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImportFenceWin32HandleInfoKHR ) - offsetof( ImportFenceWin32HandleInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImportFenceWin32HandleInfoKHR ) - offsetof( ImportFenceWin32HandleInfoKHR, pNext ) ); return *this; } @@ -42176,13 +48001,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -42200,7 +48025,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImportFenceWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + ImportFenceWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; @@ -42218,52 +48044,51 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImportFenceWin32HandleInfoKHR &() 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportFenceWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( flags == rhs.flags ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ) - && ( name == rhs.name ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && + ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name ); } - bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; - HANDLE handle = {}; - LPCWSTR name = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; }; - static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct ImportMemoryFdInfoKHR { - VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = {} ) VULKAN_HPP_NOEXCEPT : handleType( handleType_ ) , fd( fd_ ) @@ -42280,19 +48105,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImportMemoryFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + ImportMemoryFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; @@ -42304,53 +48130,56 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImportMemoryFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportMemoryFdInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default; #else - bool operator==( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( fd == rhs.fd ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd ); } - bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; int fd = {}; }; - static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ImportMemoryHostPointerInfoEXT { - VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - void* pHostPointer_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + void * pHostPointer_ = {} ) VULKAN_HPP_NOEXCEPT : handleType( handleType_ ) , pHostPointer( pHostPointer_ ) {} ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImportMemoryHostPointerInfoEXT ) - offsetof( ImportMemoryHostPointerInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImportMemoryHostPointerInfoEXT ) - offsetof( ImportMemoryHostPointerInfoEXT, pNext ) ); return *this; } @@ -42359,72 +48188,76 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImportMemoryHostPointerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + ImportMemoryHostPointerInfoEXT & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - ImportMemoryHostPointerInfoEXT & setPHostPointer( void* pHostPointer_ ) VULKAN_HPP_NOEXCEPT + ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT { pHostPointer = pHostPointer_; return *this; } - operator VkImportMemoryHostPointerInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportMemoryHostPointerInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default; #else - bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( pHostPointer == rhs.pHostPointer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( pHostPointer == rhs.pHostPointer ); } - bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; - 
const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - void* pHostPointer = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + void * pHostPointer = {}; }; - static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_WIN32_KHR struct ImportMemoryWin32HandleInfoKHR { - VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT : handleType( handleType_ ) , handle( handle_ ) , name( name_ ) @@ -42432,7 +48265,9 @@ namespace VULKAN_HPP_NAMESPACE ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImportMemoryWin32HandleInfoKHR ) - offsetof( ImportMemoryWin32HandleInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImportMemoryWin32HandleInfoKHR ) - offsetof( ImportMemoryWin32HandleInfoKHR, pNext ) ); return *this; } @@ -42441,19 +48276,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + ImportMemoryWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; @@ -42471,57 +48307,60 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportMemoryWin32HandleInfoKHR const& ) const = 
default; -#else - bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ) - && ( name == rhs.name ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( handle == rhs.handle ) && ( name == rhs.name ); } - bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - HANDLE handle = {}; - LPCWSTR name = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; }; - static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR struct ImportMemoryWin32HandleInfoNV { - VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, - HANDLE handle_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, + HANDLE handle_ = {} ) VULKAN_HPP_NOEXCEPT : handleType( handleType_ ) , handle( handle_ ) {} ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImportMemoryWin32HandleInfoNV ) - offsetof( ImportMemoryWin32HandleInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImportMemoryWin32HandleInfoNV ) - offsetof( ImportMemoryWin32HandleInfoNV, pNext ) ); return *this; } @@ -42530,19 +48369,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT + ImportMemoryWin32HandleInfoNV & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; @@ -42554,49 +48394,51 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportMemoryWin32HandleInfoNV const& ) const = default; -#else - bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default; +# else + bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( handle == rhs.handle ); } - bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {}; - HANDLE handle = {}; + HANDLE handle = {}; }; - 
static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct ImportSemaphoreFdInfoKHR { - VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - int fd_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + int fd_ = {} ) VULKAN_HPP_NOEXCEPT : semaphore( semaphore_ ) , flags( flags_ ) , handleType( handleType_ ) @@ -42614,13 +48456,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImportSemaphoreFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -42638,7 +48480,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImportSemaphoreFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; @@ -42650,54 +48493,54 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImportSemaphoreFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportSemaphoreFdInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default; #else - bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( flags == rhs.flags ) - && ( handleType == rhs.handleType ) - && ( fd == rhs.fd ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd ); } - bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool 
operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; int fd = {}; }; - static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_WIN32_KHR struct ImportSemaphoreWin32HandleInfoKHR { - VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT : semaphore( semaphore_ ) , flags( flags_ ) , handleType( handleType_ ) @@ -42707,7 +48550,9 @@ namespace VULKAN_HPP_NAMESPACE ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ImportSemaphoreWin32HandleInfoKHR ) - offsetof( ImportSemaphoreWin32HandleInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ImportSemaphoreWin32HandleInfoKHR ) - offsetof( ImportSemaphoreWin32HandleInfoKHR, pNext ) ); return *this; } @@ -42716,13 +48561,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -42734,13 +48579,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreWin32HandleInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; 
return *this; } - ImportSemaphoreWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; @@ -42758,64 +48605,65 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkImportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( flags == rhs.flags ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ) - && ( name == rhs.name ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && + ( name == rhs.name ); } - bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; - HANDLE handle = {}; - LPCWSTR name = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; }; - static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct IndirectCommandsLayoutTokenNV { - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, - uint32_t stream_ = {}, - uint32_t offset_ = {}, - uint32_t vertexBindingUnit_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, - uint32_t pushconstantOffset_ = {}, - uint32_t pushconstantSize_ = {}, - VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, - uint32_t indexTypeCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ = {}, - const uint32_t* pIndexTypeValues_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, + uint32_t stream_ = {}, + uint32_t offset_ = {}, + uint32_t vertexBindingUnit_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, + uint32_t pushconstantOffset_ = {}, + uint32_t pushconstantSize_ = {}, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, + uint32_t indexTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {}, + const uint32_t * pIndexTypeValues_ = {} ) VULKAN_HPP_NOEXCEPT : tokenType( tokenType_ ) , stream( stream_ ) , offset( offset_ ) @@ -42833,7 +48681,9 @@ namespace VULKAN_HPP_NAMESPACE IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( IndirectCommandsLayoutTokenNV ) - offsetof( IndirectCommandsLayoutTokenNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( IndirectCommandsLayoutTokenNV ) - offsetof( IndirectCommandsLayoutTokenNV, pNext ) ); return *this; } @@ -42842,19 +48692,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - IndirectCommandsLayoutTokenNV& operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - IndirectCommandsLayoutTokenNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & + setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT { tokenType = tokenType_; return *this; @@ -42878,19 +48729,22 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & + setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT { vertexDynamicStride = vertexDynamicStride_; return *this; } - IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) 
VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT { pushconstantPipelineLayout = pushconstantPipelineLayout_; return *this; } - IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT { pushconstantShaderStageFlags = pushconstantShaderStageFlags_; return *this; @@ -42908,7 +48762,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - IndirectCommandsLayoutTokenNV & setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & + setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT { indirectStateFlags = indirectStateFlags_; return *this; @@ -42920,84 +48775,82 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & + setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT { pIndexTypes = pIndexTypes_; return *this; } - IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t* pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT { pIndexTypeValues = pIndexTypeValues_; return *this; } - operator VkIndirectCommandsLayoutTokenNV const&() const VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IndirectCommandsLayoutTokenNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default; #else - bool operator==( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( tokenType == rhs.tokenType ) - && ( stream == rhs.stream ) - && ( offset == rhs.offset ) - && ( vertexBindingUnit == rhs.vertexBindingUnit ) - && ( vertexDynamicStride == rhs.vertexDynamicStride ) - && ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) - && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) - && ( pushconstantOffset == rhs.pushconstantOffset ) - && ( pushconstantSize == rhs.pushconstantSize ) - && ( indirectStateFlags == rhs.indirectStateFlags ) - && ( indexTypeCount == rhs.indexTypeCount ) - && ( pIndexTypes == rhs.pIndexTypes ) - && ( pIndexTypeValues == rhs.pIndexTypeValues ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) && + ( stream == rhs.stream ) && ( offset == rhs.offset ) && ( vertexBindingUnit == rhs.vertexBindingUnit ) && + ( vertexDynamicStride == rhs.vertexDynamicStride ) && + ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) && + 
( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) && + ( pushconstantOffset == rhs.pushconstantOffset ) && ( pushconstantSize == rhs.pushconstantSize ) && + ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) && + ( pIndexTypes == rhs.pIndexTypes ) && ( pIndexTypeValues == rhs.pIndexTypeValues ); } - bool operator!=( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup; - uint32_t stream = {}; - uint32_t offset = {}; - uint32_t vertexBindingUnit = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {}; - uint32_t pushconstantOffset = {}; - uint32_t pushconstantSize = {}; - VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {}; - uint32_t indexTypeCount = {}; - const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes = {}; - const uint32_t* pIndexTypeValues = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup; + uint32_t stream = {}; + uint32_t offset = {}; + uint32_t vertexBindingUnit = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {}; + uint32_t pushconstantOffset = {}; + uint32_t pushconstantSize = {}; + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {}; + uint32_t indexTypeCount = {}; + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes = {}; + const uint32_t * pIndexTypeValues = {}; }; - static_assert( sizeof( IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct IndirectCommandsLayoutCreateInfoNV { - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - uint32_t tokenCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ = {}, - uint32_t streamCount_ = {}, - const uint32_t* pStreamStrides_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t tokenCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {}, + uint32_t streamCount_ = {}, + const uint32_t * pStreamStrides_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pipelineBindPoint( pipelineBindPoint_ ) , tokenCount( tokenCount_ ) @@ -43008,7 +48861,9 @@ namespace VULKAN_HPP_NAMESPACE IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( IndirectCommandsLayoutCreateInfoNV ) - offsetof( IndirectCommandsLayoutCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( IndirectCommandsLayoutCreateInfoNV ) - offsetof( IndirectCommandsLayoutCreateInfoNV, pNext ) ); return *this; } @@ -43017,25 +48872,28 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - IndirectCommandsLayoutCreateInfoNV& operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & + operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - IndirectCommandsLayoutCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; @@ -43047,7 +48905,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - IndirectCommandsLayoutCreateInfoNV & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & + setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT { pTokens = pTokens_; return *this; @@ -43059,65 +48918,65 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t* pStreamStrides_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT { pStreamStrides = pStreamStrides_; return *this; } - operator VkIndirectCommandsLayoutCreateInfoNV const&() const 
VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IndirectCommandsLayoutCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default; #else - bool operator==( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( tokenCount == rhs.tokenCount ) - && ( pTokens == rhs.pTokens ) - && ( streamCount == rhs.streamCount ) - && ( pStreamStrides == rhs.pStreamStrides ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( tokenCount == rhs.tokenCount ) && + ( pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) && + ( pStreamStrides == rhs.pStreamStrides ); } - bool operator!=( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - uint32_t tokenCount = {}; - const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens = {}; - uint32_t streamCount = {}; - const uint32_t* pStreamStrides = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t tokenCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {}; + uint32_t streamCount = {}; + const uint32_t * pStreamStrides = {}; }; - static_assert( sizeof( IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct InitializePerformanceApiInfoINTEL { - VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT : pUserData( pUserData_ ) {} InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( InitializePerformanceApiInfoINTEL ) - offsetof( InitializePerformanceApiInfoINTEL, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( InitializePerformanceApiInfoINTEL ) - offsetof( InitializePerformanceApiInfoINTEL, pNext ) ); return *this; } @@ -43126,63 +48985,64 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - InitializePerformanceApiInfoINTEL& operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - InitializePerformanceApiInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - InitializePerformanceApiInfoINTEL & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT + InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } - operator VkInitializePerformanceApiInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( InitializePerformanceApiInfoINTEL const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default; #else - bool operator==( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pUserData == rhs.pUserData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pUserData == rhs.pUserData ); } - bool operator!=( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL; - const void* pNext = {}; - void* pUserData = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL; + const void * pNext = {}; + void * pUserData = {}; }; - static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct InputAttachmentAspectReference { - VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {}, - uint32_t inputAttachmentIndex_ = {}, - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + InputAttachmentAspectReference( uint32_t subpass_ = {}, + uint32_t inputAttachmentIndex_ = {}, + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT : subpass( subpass_ ) , inputAttachmentIndex( inputAttachmentIndex_ ) , aspectMask( aspectMask_ ) @@ -43193,9 +49053,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - InputAttachmentAspectReference& operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT + InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -43211,54 +49071,56 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + InputAttachmentAspectReference & + setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } - operator VkInputAttachmentAspectReference const&() const VULKAN_HPP_NOEXCEPT + operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( InputAttachmentAspectReference const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( InputAttachmentAspectReference const & ) const = default; #else - bool operator==( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( subpass == rhs.subpass ) - && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) - && ( aspectMask == rhs.aspectMask ); + return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) && + ( aspectMask == rhs.aspectMask ); } - bool operator!=( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t subpass = {}; - uint32_t inputAttachmentIndex = {}; - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t subpass = {}; + uint32_t inputAttachmentIndex = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; }; - static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct InstanceCreateInfo { - VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, - const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ = {}, - uint32_t enabledLayerCount_ = {}, - const char* const* ppEnabledLayerNames_ = {}, - uint32_t enabledExtensionCount_ = {}, - const char* const* ppEnabledExtensionNames_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, + const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {}, + uint32_t enabledLayerCount_ = {}, + const char * const * ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char * const * ppEnabledExtensionNames_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pApplicationInfo( pApplicationInfo_ ) , enabledLayerCount( enabledLayerCount_ ) @@ -43278,13 +49140,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - InstanceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -43296,7 +49158,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT + InstanceCreateInfo & + setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT { pApplicationInfo = pApplicationInfo_; return *this; @@ -43308,7 +49171,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - InstanceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT { ppEnabledLayerNames = ppEnabledLayerNames_; return *this; @@ -43320,62 +49183,61 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - InstanceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT { ppEnabledExtensionNames = ppEnabledExtensionNames_; return *this; } - operator VkInstanceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( InstanceCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( InstanceCreateInfo const & ) const = default; #else - bool operator==( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pApplicationInfo == rhs.pApplicationInfo ) - && ( enabledLayerCount == rhs.enabledLayerCount ) - && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) - && ( enabledExtensionCount == rhs.enabledExtensionCount ) - && ( 
ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+ ( pApplicationInfo == rhs.pApplicationInfo ) && ( enabledLayerCount == rhs.enabledLayerCount ) &&
+ ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) &&
+ ( enabledExtensionCount == rhs.enabledExtensionCount ) &&
+ ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
}
- bool operator!=( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
- const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo = {};
- uint32_t enabledLayerCount = {};
- const char* const* ppEnabledLayerNames = {};
- uint32_t enabledExtensionCount = {};
- const char* const* ppEnabledExtensionNames = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
+ const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo = {};
+ uint32_t enabledLayerCount = {};
+ const char * const * ppEnabledLayerNames = {};
+ uint32_t enabledExtensionCount = {};
+ const char * const * ppEnabledExtensionNames = {};
};
- static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ),
+ "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
struct LayerProperties
{
- VULKAN_HPP_CONSTEXPR_14 LayerProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const& layerName_ = {},
- uint32_t specVersion_ = {},
- uint32_t implementationVersion_ = {},
- std::array<char, VK_MAX_DESCRIPTION_SIZE> const& description_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14
+ LayerProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layerName_ = {},
+ uint32_t specVersion_ = {},
+ uint32_t implementationVersion_ = {},
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {} ) VULKAN_HPP_NOEXCEPT
: layerName( layerName_ )
, specVersion( specVersion_ )
, implementationVersion( implementationVersion_ )
@@ -43387,44 +49249,42 @@ namespace VULKAN_HPP_NAMESPACE
*this = rhs;
}
- LayerProperties& operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>( &rhs );
return *this;
}
- operator VkLayerProperties const&() const VULKAN_HPP_NOEXCEPT
+ operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkLayerProperties*>( this );
+ return *reinterpret_cast<const VkLayerProperties *>( this );
}
operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkLayerProperties*>( this );
+ return *reinterpret_cast<VkLayerProperties *>( this );
}
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( LayerProperties const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( LayerProperties const & ) const = default;
#else
- bool operator==( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( layerName == rhs.layerName )
- && ( specVersion == rhs.specVersion )
- && ( implementationVersion == rhs.implementationVersion )
- && ( description == rhs.description );
+ return ( layerName == rhs.layerName ) && ( specVersion == rhs.specVersion ) &&
+ ( implementationVersion == rhs.implementationVersion ) && ( description == rhs.description );
}
- bool operator!=( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName = {};
- uint32_t specVersion = {};
- uint32_t implementationVersion = {};
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName = {};
+ uint32_t specVersion = {};
+ uint32_t implementationVersion = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
};
static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<LayerProperties>::value, "struct wrapper is not a standard layout!"
); @@ -43433,7 +49293,7 @@ namespace VULKAN_HPP_NAMESPACE struct MacOSSurfaceCreateInfoMVK { VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, - const void* pView_ = {} ) VULKAN_HPP_NOEXCEPT + const void * pView_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pView( pView_ ) {} @@ -43449,13 +49309,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MacOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -43467,54 +49327,53 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - MacOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT + MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT { pView = pView_; return *this; } - operator VkMacOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT + operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MacOSSurfaceCreateInfoMVK const& ) const = default; -#else - bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default; +# else + bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pView == rhs.pView ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); } - bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {}; - const void* pView = {}; + const void * pView = {}; }; - static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_USE_PLATFORM_MACOS_MVK*/ struct MappedMemoryRange { VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT : memory( memory_ ) , offset( offset_ ) , size( size_ ) @@ -43531,13 +49390,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MappedMemoryRange & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -43561,42 +49420,40 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkMappedMemoryRange const&() const VULKAN_HPP_NOEXCEPT + operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MappedMemoryRange const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MappedMemoryRange const & ) const = default; #else - bool operator==( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ) - && ( offset == rhs.offset ) - && ( size == rhs.size ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && + ( size == rhs.size ); } - bool operator!=( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; - static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" ); + static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct MemoryAllocateFlagsInfo @@ -43618,13 +49475,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MemoryAllocateFlagsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -43642,46 +49499,45 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkMemoryAllocateFlagsInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryAllocateFlagsInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default; #else - bool operator==( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( deviceMask == rhs.deviceMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( deviceMask == rhs.deviceMask ); } - bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {}; - uint32_t deviceMask = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {}; + uint32_t deviceMask = {}; }; - static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct MemoryAllocateInfo { VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, - uint32_t memoryTypeIndex_ = {} ) VULKAN_HPP_NOEXCEPT + uint32_t memoryTypeIndex_ = {} ) VULKAN_HPP_NOEXCEPT : allocationSize( allocationSize_ ) , memoryTypeIndex( memoryTypeIndex_ ) {} @@ -43697,13 +49553,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -43721,40 +49577,39 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryAllocateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryAllocateInfo const & ) const = default; #else - bool operator==( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( allocationSize == rhs.allocationSize ) - && ( memoryTypeIndex == rhs.memoryTypeIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && + ( memoryTypeIndex == rhs.memoryTypeIndex ); } - bool operator!=( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; - uint32_t memoryTypeIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeIndex = {}; }; - static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct MemoryBarrier @@ -43776,13 +49631,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryBarrier& operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -43800,45 +49655,43 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryBarrier const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryBarrier const & ) const = default; #else - bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ); } - bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; }; static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct MemoryDedicatedAllocateInfo { - VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT : image( image_ ) , buffer( buffer_ ) @@ -43846,7 +49699,8 @@ namespace VULKAN_HPP_NAMESPACE MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryDedicatedAllocateInfo ) - offsetof( MemoryDedicatedAllocateInfo, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( MemoryDedicatedAllocateInfo ) - offsetof( MemoryDedicatedAllocateInfo, pNext ) ); return *this; } @@ -43855,13 +49709,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MemoryDedicatedAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -43879,53 +49733,54 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkMemoryDedicatedAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryDedicatedAllocateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default; #else - bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ) - && ( buffer == rhs.buffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); } - bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; - static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct MemoryDedicatedRequirements { - VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT : prefersDedicatedAllocation( prefersDedicatedAllocation_ ) , requiresDedicatedAllocation( requiresDedicatedAllocation_ ) {} MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryDedicatedRequirements ) - offsetof( MemoryDedicatedRequirements, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( MemoryDedicatedRequirements ) - offsetof( MemoryDedicatedRequirements, pNext ) ); return *this; } @@ -43934,47 +49789,48 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryDedicatedRequirements& operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMemoryDedicatedRequirements const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryDedicatedRequirements const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryDedicatedRequirements const & ) const = default; #else - bool operator==( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) - && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) && + ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); } - bool operator!=( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {}; - VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {}; }; - static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct MemoryFdPropertiesKHR { @@ -43993,121 +49849,131 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryFdPropertiesKHR& operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMemoryFdPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryFdPropertiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryFdPropertiesKHR const & ) const = default; #else - bool operator==( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryTypeBits == rhs.memoryTypeBits ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); } - bool operator!=( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR; - void* pNext = {}; - uint32_t memoryTypeBits = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR; + void * pNext = {}; + uint32_t memoryTypeBits = {}; }; - static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #ifdef VK_USE_PLATFORM_ANDROID_KHR struct MemoryGetAndroidHardwareBufferInfoANDROID { - VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT : memory( memory_ ) {} - MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryGetAndroidHardwareBufferInfoANDROID & + operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) - offsetof( MemoryGetAndroidHardwareBufferInfoANDROID, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) - + offsetof( MemoryGetAndroidHardwareBufferInfoANDROID, pNext ) ); return *this; } - MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryGetAndroidHardwareBufferInfoANDROID & + operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + MemoryGetAndroidHardwareBufferInfoANDROID & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } - operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const& ) const = default; -#else - bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default; +# else + bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); } - bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; }; - static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == + sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ struct MemoryGetFdInfoKHR { - VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT : memory( memory_ ) , handleType( handleType_ ) {} @@ -44123,13 +49989,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MemoryGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -44141,60 +50007,64 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + MemoryGetFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - operator VkMemoryGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryGetFdInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetFdInfoKHR const & ) const = default; #else - bool operator==( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ) - && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && + ( handleType == rhs.handleType ); } - bool operator!=( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs 
); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; - static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_WIN32_KHR struct MemoryGetWin32HandleInfoKHR { - VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT : memory( memory_ ) , handleType( handleType_ ) {} MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryGetWin32HandleInfoKHR ) - offsetof( MemoryGetWin32HandleInfoKHR, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( MemoryGetWin32HandleInfoKHR ) - offsetof( MemoryGetWin32HandleInfoKHR, pNext ) ); return *this; } @@ -44203,13 +50073,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MemoryGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -44221,52 +50091,54 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + MemoryGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - operator VkMemoryGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryGetWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ) - && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && + ( handleType == rhs.handleType ); } - bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; - static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct MemoryHeap { - VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT : size( size_ ) , flags( flags_ ) @@ -44277,39 +50149,38 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryHeap& operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMemoryHeap const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryHeap const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryHeap const & ) const = default; #else - bool operator==( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( size == rhs.size ) - && ( flags == rhs.flags ); + return ( size == rhs.size ) && ( flags == rhs.flags ); } - bool operator!=( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {}; }; static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" 
); @@ -44323,7 +50194,9 @@ namespace VULKAN_HPP_NAMESPACE MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryHostPointerPropertiesEXT ) - offsetof( MemoryHostPointerPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( MemoryHostPointerPropertiesEXT ) - offsetof( MemoryHostPointerPropertiesEXT, pNext ) ); return *this; } @@ -44332,55 +50205,60 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMemoryHostPointerPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryHostPointerPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default; #else - bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryTypeBits == rhs.memoryTypeBits ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); } - bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; - void* pNext = {}; - uint32_t memoryTypeBits = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; + void * pNext = {}; + uint32_t memoryTypeBits = {}; }; - static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct MemoryOpaqueCaptureAddressAllocateInfo { - VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT : opaqueCaptureAddress( opaqueCaptureAddress_ ) {} - MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryOpaqueCaptureAddressAllocateInfo & + operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) - offsetof( MemoryOpaqueCaptureAddressAllocateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) - + offsetof( MemoryOpaqueCaptureAddressAllocateInfo, pNext ) ); return *this; } @@ -44389,45 +50267,45 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryOpaqueCaptureAddressAllocateInfo& operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryOpaqueCaptureAddressAllocateInfo & + operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT + MemoryOpaqueCaptureAddressAllocateInfo & + setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT { opaqueCaptureAddress = opaqueCaptureAddress_; return *this; } - operator VkMemoryOpaqueCaptureAddressAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default; #else - bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); } - bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -44435,11 +50313,13 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; - const void* pNext = {}; - uint64_t opaqueCaptureAddress = {}; + const void * pNext = {}; + uint64_t opaqueCaptureAddress = {}; }; - static_assert( sizeof( 
MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct MemoryPriorityAllocateInfoEXT { @@ -44449,7 +50329,9 @@ namespace VULKAN_HPP_NAMESPACE MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryPriorityAllocateInfoEXT ) - offsetof( MemoryPriorityAllocateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( MemoryPriorityAllocateInfoEXT ) - offsetof( MemoryPriorityAllocateInfoEXT, pNext ) ); return *this; } @@ -44458,13 +50340,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryPriorityAllocateInfoEXT& operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MemoryPriorityAllocateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -44476,45 +50358,45 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkMemoryPriorityAllocateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryPriorityAllocateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default; #else - bool operator==( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( priority == rhs.priority ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority ); } - bool operator!=( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT; - const void* pNext = {}; - float priority = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT; + const void * pNext = {}; + float priority = {}; }; - static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct MemoryRequirements { - VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, - uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, + uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT : size( size_ ) , alignment( alignment_ ) , memoryTypeBits( memoryTypeBits_ ) @@ -44525,49 +50407,49 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryRequirements& operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryRequirements const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryRequirements const & ) const = default; #else - bool operator==( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( size == rhs.size ) - && ( alignment == rhs.alignment ) - && ( memoryTypeBits == rhs.memoryTypeBits ); + return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits ); } - bool operator!=( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::DeviceSize alignment = {}; - uint32_t memoryTypeBits = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize alignment = {}; + uint32_t memoryTypeBits = {}; }; - static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" ); + static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct MemoryRequirements2 { - VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT : memoryRequirements( memoryRequirements_ ) {} @@ -44582,50 +50464,49 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryRequirements2& operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryRequirements2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryRequirements2 const & ) const = default; #else - bool operator==( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryRequirements == rhs.memoryRequirements ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements ); } - bool operator!=( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {}; }; - static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct MemoryType { VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, - uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT + uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT : propertyFlags( propertyFlags_ ) , heapIndex( heapIndex_ ) {} @@ -44635,32 +50516,31 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryType& operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMemoryType const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryType &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryType const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryType const & ) const = default; #else - bool operator==( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( propertyFlags == rhs.propertyFlags ) - && ( heapIndex == rhs.heapIndex ); + return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex ); } - bool operator!=( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -44668,7 +50548,7 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {}; - uint32_t heapIndex = {}; + uint32_t heapIndex = {}; }; static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); @@ -44682,7 +50562,9 @@ namespace VULKAN_HPP_NAMESPACE MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryWin32HandlePropertiesKHR ) - offsetof( MemoryWin32HandlePropertiesKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( MemoryWin32HandlePropertiesKHR ) - offsetof( MemoryWin32HandlePropertiesKHR, pNext ) ); return *this; } @@ -44691,52 +50573,52 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MemoryWin32HandlePropertiesKHR& operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMemoryWin32HandlePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryWin32HandlePropertiesKHR const& ) const = default; -#else - bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default; +# else + bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryTypeBits == rhs.memoryTypeBits ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); } - bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; - void* pNext = {}; - uint32_t memoryTypeBits = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; + void * pNext = {}; + uint32_t memoryTypeBits = {}; }; - static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_METAL_EXT struct MetalSurfaceCreateInfoEXT { VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, - const CAMetalLayer* pLayer_ = {} ) VULKAN_HPP_NOEXCEPT + const CAMetalLayer * pLayer_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pLayer( pLayer_ ) {} @@ -44752,13 +50634,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MetalSurfaceCreateInfoEXT& operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MetalSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -44770,52 +50652,52 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer* pLayer_ ) VULKAN_HPP_NOEXCEPT + MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT { pLayer = pLayer_; return *this; } - operator VkMetalSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MetalSurfaceCreateInfoEXT const& ) const = default; -#else - bool operator==( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default; +# else + bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pLayer == rhs.pLayer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer ); } - bool operator!=( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; - const CAMetalLayer* pLayer = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; + const CAMetalLayer * pLayer = {}; }; - static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_USE_PLATFORM_METAL_EXT*/ struct MultisamplePropertiesEXT { - VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT : maxSampleLocationGridSize( maxSampleLocationGridSize_ ) {} @@ -44830,53 +50712,53 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - MultisamplePropertiesEXT& operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMultisamplePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MultisamplePropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultisamplePropertiesEXT const & ) const = default; #else - bool operator==( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); } - bool operator!=( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; }; - static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PastPresentationTimingGOOGLE { - VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_ = {}, - uint64_t desiredPresentTime_ = {}, - uint64_t actualPresentTime_ = {}, + VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_ = {}, + uint64_t desiredPresentTime_ = {}, + uint64_t actualPresentTime_ = {}, uint64_t earliestPresentTime_ = {}, - uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT + uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT : presentID( presentID_ ) , desiredPresentTime( desiredPresentTime_ ) , actualPresentTime( actualPresentTime_ ) @@ -44889,106 +50771,113 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PastPresentationTimingGOOGLE& operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPastPresentationTimingGOOGLE const&() const VULKAN_HPP_NOEXCEPT + operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PastPresentationTimingGOOGLE const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default; #else - bool operator==( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( presentID == rhs.presentID ) - && ( desiredPresentTime == rhs.desiredPresentTime ) - && ( actualPresentTime == rhs.actualPresentTime ) - && ( earliestPresentTime == rhs.earliestPresentTime ) - && ( presentMargin == rhs.presentMargin ); + return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ) && + ( actualPresentTime == rhs.actualPresentTime ) && ( earliestPresentTime == rhs.earliestPresentTime ) && + ( presentMargin == rhs.presentMargin ); } - bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t presentID = {}; - uint64_t desiredPresentTime = {}; - uint64_t actualPresentTime = {}; + uint32_t presentID = {}; + uint64_t desiredPresentTime = {}; + uint64_t actualPresentTime = {}; uint64_t earliestPresentTime = {}; - uint64_t presentMargin = {}; + uint64_t presentMargin = {}; }; - static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PerformanceConfigurationAcquireInfoINTEL { - VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) VULKAN_HPP_NOEXCEPT - : type( type_ ) + VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) + VULKAN_HPP_NOEXCEPT : type( type_ ) {} - PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceConfigurationAcquireInfoINTEL & + operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceConfigurationAcquireInfoINTEL ) - offsetof( PerformanceConfigurationAcquireInfoINTEL, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PerformanceConfigurationAcquireInfoINTEL ) - + offsetof( PerformanceConfigurationAcquireInfoINTEL, pNext ) ); return *this; } - PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PerformanceConfigurationAcquireInfoINTEL& operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceConfigurationAcquireInfoINTEL & + operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + PerformanceConfigurationAcquireInfoINTEL & + setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } - operator VkPerformanceConfigurationAcquireInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default; #else - bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ); } - bool operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs 
) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -44996,18 +50885,23 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated; }; - static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == + sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value, + "struct wrapper is not a standard layout!" ); struct PerformanceCounterDescriptionKHR { - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, - std::array<char, VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, - std::array<char, VK_MAX_DESCRIPTION_SIZE> const& category_ = {}, - std::array<char, VK_MAX_DESCRIPTION_SIZE> const& description_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, + std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, + std::array<char, VK_MAX_DESCRIPTION_SIZE> const & category_ = {}, + std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , name( name_ ) , category( category_ ) @@ -45016,7 +50910,9 @@ namespace VULKAN_HPP_NAMESPACE PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceCounterDescriptionKHR ) - offsetof( PerformanceCounterDescriptionKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PerformanceCounterDescriptionKHR ) - offsetof( PerformanceCounterDescriptionKHR, pNext ) ); return *this; } @@ -45025,36 +50921,32 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PerformanceCounterDescriptionKHR& operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<PerformanceCounterDescriptionKHR const *>(&rhs); + *this = *reinterpret_cast<PerformanceCounterDescriptionKHR const *>( &rhs ); return *this; } - operator VkPerformanceCounterDescriptionKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR*>( this ); + return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR *>( this ); } operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( this ); + return *reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceCounterDescriptionKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceCounterDescriptionKHR const & ) const = default; #else - bool operator==( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( name
== rhs.name ) - && ( category == rhs.category ) - && ( description == rhs.description ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( name == rhs.name ) && + ( category == rhs.category ) && ( description == rhs.description ); } - bool operator!=( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -45062,21 +50954,26 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {}; }; - static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PerformanceCounterDescriptionKHR>::value, + "struct wrapper is not a standard layout!" ); struct PerformanceCounterKHR { - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, - VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, - VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, - std::array<uint8_t, VK_UUID_SIZE> const& uuid_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( + VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, + std::array<uint8_t, VK_UUID_SIZE> const & uuid_ = {} ) VULKAN_HPP_NOEXCEPT : unit( unit_ ) , scope( scope_ ) , storage( storage_ ) @@ -45094,82 +50991,69 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PerformanceCounterKHR& operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<PerformanceCounterKHR const *>(&rhs); + *this = *reinterpret_cast<PerformanceCounterKHR const *>( &rhs ); return *this; } - operator VkPerformanceCounterKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPerformanceCounterKHR*>( this ); + return *reinterpret_cast<const VkPerformanceCounterKHR *>( this ); } operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPerformanceCounterKHR*>( this ); + return *reinterpret_cast<VkPerformanceCounterKHR *>( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceCounterKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>(
PerformanceCounterKHR const & ) const = default; #else - bool operator==( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( unit == rhs.unit ) - && ( scope == rhs.scope ) - && ( storage == rhs.storage ) - && ( uuid == rhs.uuid ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) && + ( storage == rhs.storage ) && ( uuid == rhs.uuid ); } - bool operator!=( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric; - VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer; - VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric; + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer; + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32; VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {}; }; - static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout<PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!"
); union PerformanceCounterResultKHR { - PerformanceCounterResultKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const& rhs ) VULKAN_HPP_NOEXCEPT + PerformanceCounterResultKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) ); } - PerformanceCounterResultKHR( int32_t int32_ = {} ) - : int32( int32_ ) - {} + PerformanceCounterResultKHR( int32_t int32_ = {} ) : int32( int32_ ) {} - PerformanceCounterResultKHR( int64_t int64_ ) - : int64( int64_ ) - {} + PerformanceCounterResultKHR( int64_t int64_ ) : int64( int64_ ) {} - PerformanceCounterResultKHR( uint32_t uint32_ ) - : uint32( uint32_ ) - {} + PerformanceCounterResultKHR( uint32_t uint32_ ) : uint32( uint32_ ) {} - PerformanceCounterResultKHR( uint64_t uint64_ ) - : uint64( uint64_ ) - {} + PerformanceCounterResultKHR( uint64_t uint64_ ) : uint64( uint64_ ) {} - PerformanceCounterResultKHR( float float32_ ) - : float32( float32_ ) - {} + PerformanceCounterResultKHR( float float32_ ) : float32( float32_ ) {} - PerformanceCounterResultKHR( double float64_ ) - : float64( float64_ ) - {} + PerformanceCounterResultKHR( double float64_ ) : float64( float64_ ) {} PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT { @@ -45207,39 +51091,39 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR & + operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) ); return *this; } - operator VkPerformanceCounterResultKHR const&() const + operator VkPerformanceCounterResultKHR const &() const { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } operator VkPerformanceCounterResultKHR &() { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } - int32_t int32; - int64_t int64; + int32_t int32; + int64_t int64; uint32_t uint32; uint64_t uint64; - float float32; - double float64; + float float32; + double float64; }; struct PerformanceMarkerInfoINTEL { - VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {} ) VULKAN_HPP_NOEXCEPT - : marker( marker_ ) - {} + VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {} ) VULKAN_HPP_NOEXCEPT : marker( marker_ ) {} PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceMarkerInfoINTEL ) - offsetof( PerformanceMarkerInfoINTEL, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( PerformanceMarkerInfoINTEL ) - offsetof( PerformanceMarkerInfoINTEL, pNext ) ); return *this; } @@ -45248,13 +51132,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PerformanceMarkerInfoINTEL& operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PerformanceMarkerInfoINTEL & 
setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -45266,45 +51150,47 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceMarkerInfoINTEL const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default; #else - bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( marker == rhs.marker ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); } - bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL; - const void* pNext = {}; - uint64_t marker = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL; + const void * pNext = {}; + uint64_t marker = {}; }; - static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PerformanceOverrideInfoINTEL { - VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, - VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, - uint64_t parameter_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, + VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, + uint64_t parameter_ = {} ) VULKAN_HPP_NOEXCEPT : type( type_ ) , enable( enable_ ) , parameter( parameter_ ) @@ -45312,7 +51198,8 @@ namespace VULKAN_HPP_NAMESPACE PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceOverrideInfoINTEL ) - offsetof( PerformanceOverrideInfoINTEL, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( PerformanceOverrideInfoINTEL ) - offsetof( PerformanceOverrideInfoINTEL, pNext ) ); return *this; } @@ -45321,19 +51208,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PerformanceOverrideInfoINTEL& operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + PerformanceOverrideInfoINTEL & + setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; @@ -45351,43 +51239,43 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceOverrideInfoINTEL const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceOverrideInfoINTEL const & ) const = default; #else - bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( enable == rhs.enable ) - && ( parameter == rhs.parameter ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) && + ( parameter == rhs.parameter ); } - bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = 
VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware; - VULKAN_HPP_NAMESPACE::Bool32 enable = {}; - uint64_t parameter = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware; + VULKAN_HPP_NAMESPACE::Bool32 enable = {}; + uint64_t parameter = {}; }; - static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PerformanceQuerySubmitInfoKHR { @@ -45397,7 +51285,9 @@ namespace VULKAN_HPP_NAMESPACE PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceQuerySubmitInfoKHR ) - offsetof( PerformanceQuerySubmitInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PerformanceQuerySubmitInfoKHR ) - offsetof( PerformanceQuerySubmitInfoKHR, pNext ) ); return *this; } @@ -45406,13 +51296,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PerformanceQuerySubmitInfoKHR& operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PerformanceQuerySubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -45424,39 +51314,39 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPerformanceQuerySubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceQuerySubmitInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default; #else - bool operator==( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( counterPassIndex == rhs.counterPassIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex ); } - bool operator!=( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR; - const void* pNext = {}; - uint32_t counterPassIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePerformanceQuerySubmitInfoKHR; + const void * pNext = {}; + uint32_t counterPassIndex = {}; }; - static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PerformanceStreamMarkerInfoINTEL { @@ -45466,7 +51356,9 @@ namespace VULKAN_HPP_NAMESPACE PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceStreamMarkerInfoINTEL ) - offsetof( PerformanceStreamMarkerInfoINTEL, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PerformanceStreamMarkerInfoINTEL ) - offsetof( PerformanceStreamMarkerInfoINTEL, pNext ) ); return *this; } @@ -45475,13 +51367,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PerformanceStreamMarkerInfoINTEL& operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -45493,62 +51385,54 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceStreamMarkerInfoINTEL const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default; #else - bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( marker == rhs.marker ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); } - bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; - const void* pNext = {}; - uint32_t marker = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; + const void * pNext = {}; + uint32_t marker = {}; }; - static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); union PerformanceValueDataINTEL { - PerformanceValueDataINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const& rhs ) VULKAN_HPP_NOEXCEPT + PerformanceValueDataINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) ); } - PerformanceValueDataINTEL( uint32_t value32_ = {} ) - : value32( value32_ ) - {} + PerformanceValueDataINTEL( uint32_t value32_ = {} ) : value32( value32_ ) {} - PerformanceValueDataINTEL( uint64_t value64_ ) - : value64( value64_ ) - {} + PerformanceValueDataINTEL( uint64_t value64_ ) : value64( value64_ ) {} - PerformanceValueDataINTEL( float valueFloat_ ) - : valueFloat( valueFloat_ ) - {} + PerformanceValueDataINTEL( float valueFloat_ ) : valueFloat( valueFloat_ ) {} - PerformanceValueDataINTEL( const char* valueString_ ) - : valueString( valueString_ ) - {} + PerformanceValueDataINTEL( const char * valueString_ ) : valueString( valueString_ ) {} PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT { @@ -45574,47 +51458,49 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PerformanceValueDataINTEL & setValueString( const char* valueString_ ) VULKAN_HPP_NOEXCEPT + PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT { valueString = valueString_; return *this; } - VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL & + operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) ); return *this; } - operator VkPerformanceValueDataINTEL const&() const + operator VkPerformanceValueDataINTEL const &() const { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } operator VkPerformanceValueDataINTEL &() { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - uint32_t value32; - uint64_t value64; - float valueFloat; + uint32_t value32; + uint64_t value64; + float valueFloat; VULKAN_HPP_NAMESPACE::Bool32 valueBool; - const char* valueString; + const char * valueString; #else - uint32_t value32; - uint64_t value64; - float valueFloat; - VkBool32 valueBool; - const char* valueString; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + uint32_t value32; + uint64_t value64; + float valueFloat; + VkBool32 valueBool; + const char * valueString; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; struct PerformanceValueINTEL { - PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, - VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT + PerformanceValueINTEL( + 
VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT : type( type_ ) , data( data_ ) {} @@ -45624,9 +51510,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PerformanceValueINTEL& operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -45642,29 +51528,31 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPerformanceValueINTEL const&() const VULKAN_HPP_NOEXCEPT + operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } public: VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32; VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {}; }; - static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" ); + static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PhysicalDevice16BitStorageFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT : storageBuffer16BitAccess( storageBuffer16BitAccess_ ) , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) , storagePushConstant16( storagePushConstant16_ ) @@ -45673,7 +51561,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevice16BitStorageFeatures ) - offsetof( PhysicalDevice16BitStorageFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDevice16BitStorageFeatures ) - offsetof( PhysicalDevice16BitStorageFeatures, pNext ) ); return *this; } @@ -45682,66 +51572,70 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevice16BitStorageFeatures & + operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDevice16BitStorageFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDevice16BitStorageFeatures & 
setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevice16BitStorageFeatures & + setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT { storageBuffer16BitAccess = storageBuffer16BitAccess_; return *this; } - PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; return *this; } - PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevice16BitStorageFeatures & + setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT { storagePushConstant16 = storagePushConstant16_; return *this; } - PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevice16BitStorageFeatures & + setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT { storageInputOutput16 = storageInputOutput16_; return *this; } - operator VkPhysicalDevice16BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDevice16BitStorageFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default; #else - bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) - && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) - && ( storagePushConstant16 == rhs.storagePushConstant16 ) - && ( storageInputOutput16 == rhs.storageInputOutput16 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && + ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && + ( storagePushConstant16 == rhs.storagePushConstant16 ) && + ( storageInputOutput16 == rhs.storageInputOutput16 ); } - bool operator!=( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -45749,20 +51643,23 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; - 
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; }; - static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDevice8BitStorageFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT : storageBuffer8BitAccess( storageBuffer8BitAccess_ ) , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ) , storagePushConstant8( storagePushConstant8_ ) @@ -45770,7 +51667,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevice8BitStorageFeatures ) - offsetof( PhysicalDevice8BitStorageFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDevice8BitStorageFeatures ) - offsetof( PhysicalDevice8BitStorageFeatures, pNext ) ); return *this; } @@ -45779,59 +51678,61 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDevice8BitStorageFeatures& operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDevice8BitStorageFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDevice8BitStorageFeatures & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevice8BitStorageFeatures & + setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT { storageBuffer8BitAccess = storageBuffer8BitAccess_; return *this; } - PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; return *this; } - PhysicalDevice8BitStorageFeatures & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) 
VULKAN_HPP_NOEXCEPT + PhysicalDevice8BitStorageFeatures & + setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT { storagePushConstant8 = storagePushConstant8_; return *this; } - operator VkPhysicalDevice8BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDevice8BitStorageFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default; #else - bool operator==( PhysicalDevice8BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) - && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) - && ( storagePushConstant8 == rhs.storagePushConstant8 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && + ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && + ( storagePushConstant8 == rhs.storagePushConstant8 ); } - bool operator!=( PhysicalDevice8BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -45839,23 +51740,29 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; }; - static_assert( sizeof( PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceASTCDecodeFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT : decodeModeSharedExponent( decodeModeSharedExponent_ ) {} - PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceASTCDecodeFeaturesEXT & + operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) - offsetof( PhysicalDeviceASTCDecodeFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) - offsetof( PhysicalDeviceASTCDecodeFeaturesEXT, pNext ) ); return *this; } @@ -45864,45 +51771,46 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceASTCDecodeFeaturesEXT & + operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceASTCDecodeFeaturesEXT & + setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT { decodeModeSharedExponent = decodeModeSharedExponent_; return *this; } - operator VkPhysicalDeviceASTCDecodeFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( decodeModeSharedExponent == rhs.decodeModeSharedExponent ); } - bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -45910,68 +51818,77 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 
decodeModeSharedExponent = {}; }; - static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ ) {} - PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & + operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) - + offsetof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & + operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT { advancedBlendCoherentOperations = advancedBlendCoherentOperations_; return *this; } - operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& ) const = default; +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations ); } - bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -45979,20 +51896,24 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {}; }; - static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == + sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( + uint32_t advancedBlendMaxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ) , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ) , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ) @@ -46001,49 +51922,55 @@ namespace VULKAN_HPP_NAMESPACE , advancedBlendAllOperations( advancedBlendAllOperations_ ) {} - PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBlendOperationAdvancedPropertiesEXT & + operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) - + offsetof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT, pNext ) ); return *this; } - PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceBlendOperationAdvancedPropertiesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBlendOperationAdvancedPropertiesEXT & + operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs 
) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) - && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) - && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) - && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) - && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) - && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) && + ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) && + ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) && + ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) && + ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) && + ( advancedBlendAllOperations == rhs.advancedBlendAllOperations ); } - bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -46051,91 +51978,102 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; - void* pNext = {}; - uint32_t advancedBlendMaxColorAttachments = {}; - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {}; - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {}; - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {}; - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {}; - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {}; + void * pNext = {}; + uint32_t advancedBlendMaxColorAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {}; }; - static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == + sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceBufferDeviceAddressFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT : bufferDeviceAddress( bufferDeviceAddress_ ) , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) {} - PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeatures & + operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) - offsetof( PhysicalDeviceBufferDeviceAddressFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) - + offsetof( PhysicalDeviceBufferDeviceAddressFeatures, pNext ) ); return *this; } - PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceBufferDeviceAddressFeatures& operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeatures & + operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeatures & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT { bufferDeviceAddress = bufferDeviceAddress_; return *this; } - PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; return *this; } - PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; return *this; } - operator VkPhysicalDeviceBufferDeviceAddressFeatures const&() const VULKAN_HPP_NOEXCEPT + operator 
VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) - && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) - && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); } - bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -46143,88 +52081,99 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; }; - static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) == + sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceBufferDeviceAddressFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT : bufferDeviceAddress( bufferDeviceAddress_ ) , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) {} - PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeaturesEXT & + operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) - offsetof( PhysicalDeviceBufferDeviceAddressFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) - + offsetof( PhysicalDeviceBufferDeviceAddressFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeaturesEXT & + operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeaturesEXT & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT { bufferDeviceAddress = bufferDeviceAddress_; return *this; } - PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; return *this; } - PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; return *this; } - operator 
VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) - && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) - && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); } - bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -46232,23 +52181,31 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; }; - static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == + sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);

   struct PhysicalDeviceCoherentMemoryFeaturesAMD
   {
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD(
+      VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT
       : deviceCoherentMemory( deviceCoherentMemory_ )
     {}

-    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceCoherentMemoryFeaturesAMD &
+      operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) - offsetof( PhysicalDeviceCoherentMemoryFeaturesAMD, pNext ) );
+      memcpy( &pNext,
+              &rhs.pNext,
+              sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) -
+                offsetof( PhysicalDeviceCoherentMemoryFeaturesAMD, pNext ) );
       return *this;
     }
@@ -46257,45 +52214,45 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    PhysicalDeviceCoherentMemoryFeaturesAMD& operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceCoherentMemoryFeaturesAMD &
+      operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceCoherentMemoryFeaturesAMD &
+      setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
     {
       deviceCoherentMemory = deviceCoherentMemory_;
       return *this;
     }

-    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
     }

     operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
     }

-    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -46303,77 +52260,88 @@ namespace VULKAN_HPP_NAMESPACE
   public:
     const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
+    void *
pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {}; }; - static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == + sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceComputeShaderDerivativesFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ) , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ ) {} - PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceComputeShaderDerivativesFeaturesNV & + operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) - offsetof( PhysicalDeviceComputeShaderDerivativesFeaturesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) - + offsetof( PhysicalDeviceComputeShaderDerivativesFeaturesNV, pNext ) ); return *this; } - PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceComputeShaderDerivativesFeaturesNV & + operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceComputeShaderDerivativesFeaturesNV & + setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT { computeDerivativeGroupQuads = computeDerivativeGroupQuads_; return *this; } - PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceComputeShaderDerivativesFeaturesNV & + setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT { 
computeDerivativeGroupLinear = computeDerivativeGroupLinear_; return *this; } - operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) - && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) && + ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear ); } - bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -46381,78 +52349,88 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {}; - VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {}; }; - static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == + sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceConditionalRenderingFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT : conditionalRendering( conditionalRendering_ ) , inheritedConditionalRendering( inheritedConditionalRendering_ ) {} - PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceConditionalRenderingFeaturesEXT & + operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) - offsetof( PhysicalDeviceConditionalRenderingFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) - + offsetof( PhysicalDeviceConditionalRenderingFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceConditionalRenderingFeaturesEXT & + operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceConditionalRenderingFeaturesEXT & + setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT { conditionalRendering = conditionalRendering_; return *this; } - PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT { inheritedConditionalRendering = inheritedConditionalRendering_; return *this; } - operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( conditionalRendering == rhs.conditionalRendering ) - && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRendering == rhs.conditionalRendering ) && + ( inheritedConditionalRendering == rhs.inheritedConditionalRendering ); } - bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -46460,24 +52438,28 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {}; - VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {}; }; - static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == + sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceConservativeRasterizationPropertiesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {}, - float maxExtraPrimitiveOverestimationSize_ = {}, - float extraPrimitiveOverestimationSizeGranularity_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( + float primitiveOverestimationSize_ = {}, + float maxExtraPrimitiveOverestimationSize_ = {}, + float extraPrimitiveOverestimationSizeGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {} ) VULKAN_HPP_NOEXCEPT : primitiveOverestimationSize( primitiveOverestimationSize_ ) , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ) , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ) @@ -46489,138 +52471,155 @@ namespace VULKAN_HPP_NAMESPACE , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) {} - PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceConservativeRasterizationPropertiesEXT & + operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceConservativeRasterizationPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) - + offsetof( PhysicalDeviceConservativeRasterizationPropertiesEXT, pNext ) ); return *this; } - PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceConservativeRasterizationPropertiesEXT( + VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceConservativeRasterizationPropertiesEXT & + operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) - && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) - && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) - && ( primitiveUnderestimation == rhs.primitiveUnderestimation ) - && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) - && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) - && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) - && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) - && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) && + ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) && + ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) && + ( primitiveUnderestimation == rhs.primitiveUnderestimation ) && + ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) && + ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) && + ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) && + ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) && + ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); } - bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; - void* pNext = {}; - float primitiveOverestimationSize = {}; - float maxExtraPrimitiveOverestimationSize = {}; - float extraPrimitiveOverestimationSizeGranularity = {}; - VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {}; - VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {}; - VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {}; - VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {}; - VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {}; - VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + void * pNext = {}; + float primitiveOverestimationSize = {}; + float maxExtraPrimitiveOverestimationSize = {}; + float extraPrimitiveOverestimationSizeGranularity = {}; 
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {}; + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {}; + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {}; + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {}; + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {}; }; - static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == + sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceCooperativeMatrixFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT : cooperativeMatrix( cooperativeMatrix_ ) , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ ) {} - PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCooperativeMatrixFeaturesNV & + operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) - offsetof( PhysicalDeviceCooperativeMatrixFeaturesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) - + offsetof( PhysicalDeviceCooperativeMatrixFeaturesNV, pNext ) ); return *this; } - PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceCooperativeMatrixFeaturesNV& operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCooperativeMatrixFeaturesNV & + operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCooperativeMatrixFeaturesNV & + setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT { cooperativeMatrix = cooperativeMatrix_; return *this; } - PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) 
VULKAN_HPP_NOEXCEPT + PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT { cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; return *this; } - operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( cooperativeMatrix == rhs.cooperativeMatrix ) - && ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && + ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); } - bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -46628,57 +52627,66 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; - VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; }; - static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceCooperativeMatrixPropertiesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ ) {} - PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCooperativeMatrixPropertiesNV & + operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) - offsetof( PhysicalDeviceCooperativeMatrixPropertiesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) - + offsetof( PhysicalDeviceCooperativeMatrixPropertiesNV, pNext ) ); return *this; } - PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceCooperativeMatrixPropertiesNV& operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCooperativeMatrixPropertiesNV & + operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default; #else - bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); } - bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -46686,68 +52694,77 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; }; - static_assert( sizeof( 
PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceCornerSampledImageFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT : cornerSampledImage( cornerSampledImage_ ) {} - PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCornerSampledImageFeaturesNV & + operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) - offsetof( PhysicalDeviceCornerSampledImageFeaturesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) - + offsetof( PhysicalDeviceCornerSampledImageFeaturesNV, pNext ) ); return *this; } - PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCornerSampledImageFeaturesNV & + operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCornerSampledImageFeaturesNV & + setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT { cornerSampledImage = cornerSampledImage_; return *this; } - operator VkPhysicalDeviceCornerSampledImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( 
PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( cornerSampledImage == rhs.cornerSampledImage ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage ); } - bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -46755,68 +52772,77 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {}; }; - static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == + sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceCoverageReductionModeFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT : coverageReductionMode( coverageReductionMode_ ) {} - PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCoverageReductionModeFeaturesNV & + operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) - offsetof( PhysicalDeviceCoverageReductionModeFeaturesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) - + offsetof( PhysicalDeviceCoverageReductionModeFeaturesNV, pNext ) ); return *this; } - PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceCoverageReductionModeFeaturesNV& operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCoverageReductionModeFeaturesNV & + operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCoverageReductionModeFeaturesNV & + 
setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT { coverageReductionMode = coverageReductionMode_; return *this; } - operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( coverageReductionMode == rhs.coverageReductionMode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ); } - bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -46824,137 +52850,158 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {}; }; - static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == + sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ ) {} - PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) - offsetof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) - + offsetof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, pNext ) ); return *this; } - PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( + &rhs ); return *this; } - PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT { dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; return *this; } - operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( 
PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing ); } - bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {}; }; - static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == + sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceDepthClipEnableFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT : depthClipEnable( depthClipEnable_ ) {} - PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDepthClipEnableFeaturesEXT & + operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) - offsetof( PhysicalDeviceDepthClipEnableFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) - + offsetof( PhysicalDeviceDepthClipEnableFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceDepthClipEnableFeaturesEXT& operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDepthClipEnableFeaturesEXT & + operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + 
PhysicalDeviceDepthClipEnableFeaturesEXT & + setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT { depthClipEnable = depthClipEnable_; return *this; } - operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this ); + return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT *>( this ); } operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this ); + return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT *>( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( depthClipEnable == rhs.depthClipEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == rhs.depthClipEnable ); } - bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -46962,65 +53009,74 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; }; - static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == + sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceDepthClipEnableFeaturesEXT>::value, + "struct wrapper is not a standard layout!"
); struct PhysicalDeviceDepthStencilResolveProperties { - VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {} ) VULKAN_HPP_NOEXCEPT : supportedDepthResolveModes( supportedDepthResolveModes_ ) , supportedStencilResolveModes( supportedStencilResolveModes_ ) , independentResolveNone( independentResolveNone_ ) , independentResolve( independentResolve_ ) {} - PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDepthStencilResolveProperties & + operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDepthStencilResolveProperties ) - offsetof( PhysicalDeviceDepthStencilResolveProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceDepthStencilResolveProperties ) - + offsetof( PhysicalDeviceDepthStencilResolveProperties, pNext ) ); return *this; } - PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceDepthStencilResolveProperties& operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDepthStencilResolveProperties & + operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceDepthStencilResolveProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default; #else - bool operator==( PhysicalDeviceDepthStencilResolveProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) - && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) - && ( independentResolveNone == rhs.independentResolveNone ) - && ( independentResolve == rhs.independentResolve ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( 
supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && + ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && + ( independentResolveNone == rhs.independentResolveNone ) && + ( independentResolve == rhs.independentResolve ); } - bool operator!=( PhysicalDeviceDepthStencilResolveProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -47028,37 +53084,41 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; }; - static_assert( sizeof( PhysicalDeviceDepthStencilResolveProperties ) == sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceDepthStencilResolveProperties ) == + sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceDescriptorIndexingFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {} ) VULKAN_HPP_NOEXCEPT : shaderInputAttachmentArrayDynamicIndexing( 
shaderInputAttachmentArrayDynamicIndexing_ ) , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) @@ -47081,189 +53141,218 @@ namespace VULKAN_HPP_NAMESPACE , runtimeDescriptorArray( runtimeDescriptorArray_ ) {} - PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & + operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDescriptorIndexingFeatures ) - offsetof( PhysicalDeviceDescriptorIndexingFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceDescriptorIndexingFeatures ) - + offsetof( PhysicalDeviceDescriptorIndexingFeatures, pNext ) ); return *this; } - PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceDescriptorIndexingFeatures& operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & + operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 
shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { 
descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; 
return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; return *this; } - PhysicalDeviceDescriptorIndexingFeatures & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & + setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT { runtimeDescriptorArray = runtimeDescriptorArray_; return *this; } - operator VkPhysicalDeviceDescriptorIndexingFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceDescriptorIndexingFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) - && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) - && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) - && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) - && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) - && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) - && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) - && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) - && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) - && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) - && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) - && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) - && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) - && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) - && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) - && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) - && ( 
descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) - && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) - && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) - && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( shaderUniformTexelBufferArrayNonUniformIndexing == + rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == + rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == + rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == + rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ); } - bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -47271,56 +53360,60 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 
shaderUniformTexelBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; - VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; }; - static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeatures ) == + sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceDescriptorIndexingProperties { - VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, - uint32_t maxPerStageUpdateAfterBindResources_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( + uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, + uint32_t maxPerStageUpdateAfterBindResources_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {} ) VULKAN_HPP_NOEXCEPT : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) , 
shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) @@ -47346,66 +53439,83 @@ namespace VULKAN_HPP_NAMESPACE , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) {} - PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingProperties & + operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDescriptorIndexingProperties ) - offsetof( PhysicalDeviceDescriptorIndexingProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceDescriptorIndexingProperties ) - + offsetof( PhysicalDeviceDescriptorIndexingProperties, pNext ) ); return *this; } - PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceDescriptorIndexingProperties& operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingProperties & + operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceDescriptorIndexingProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default; #else - bool operator==( PhysicalDeviceDescriptorIndexingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) - && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) - && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) - && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) - && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) - && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) - && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) - && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) - && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) - && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == 
rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) - && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) - && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) - && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) - && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) - && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) - && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) - && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) - && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) - && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) - && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) - && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) - && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) - && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == + rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == + rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == + rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == + rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == + rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && + ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindSampledImages == + rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == + rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == + rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( 
maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); } - bool operator!=( PhysicalDeviceDescriptorIndexingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -47413,90 +53523,100 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; - void* pNext = {}; - uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; - uint32_t maxPerStageUpdateAfterBindResources = {}; - uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; - uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + void * pNext = {}; + uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; + uint32_t 
maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; + uint32_t maxPerStageUpdateAfterBindResources = {}; + uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; }; - static_assert( sizeof( PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceDescriptorIndexingProperties ) == + sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {} ) VULKAN_HPP_NOEXCEPT : deviceGeneratedCommands( deviceGeneratedCommands_ ) {} - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) - offsetof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) - + offsetof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, pNext ) ); return *this; } - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV& operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT { deviceGeneratedCommands = deviceGeneratedCommands_; return *this; } - operator 
VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this ); + return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this ); + return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ); } - bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -47504,23 +53624,27 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; }; - static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value, + "struct wrapper is not a standard layout!"
); struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( uint32_t maxGraphicsShaderGroupCount_ = {}, - uint32_t maxIndirectSequenceCount_ = {}, - uint32_t maxIndirectCommandsTokenCount_ = {}, - uint32_t maxIndirectCommandsStreamCount_ = {}, - uint32_t maxIndirectCommandsTokenOffset_ = {}, - uint32_t maxIndirectCommandsStreamStride_ = {}, - uint32_t minSequencesCountBufferOffsetAlignment_ = {}, - uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, - uint32_t minIndirectCommandsBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( + uint32_t maxGraphicsShaderGroupCount_ = {}, + uint32_t maxIndirectSequenceCount_ = {}, + uint32_t maxIndirectCommandsTokenCount_ = {}, + uint32_t maxIndirectCommandsStreamCount_ = {}, + uint32_t maxIndirectCommandsTokenOffset_ = {}, + uint32_t maxIndirectCommandsStreamStride_ = {}, + uint32_t minSequencesCountBufferOffsetAlignment_ = {}, + uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, + uint32_t minIndirectCommandsBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT : maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ) , maxIndirectSequenceCount( maxIndirectSequenceCount_ ) , maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ) @@ -47532,52 +53656,58 @@ namespace VULKAN_HPP_NAMESPACE , minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ ) {} - PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) - offsetof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) - + offsetof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV, pNext ) ); return *this; } - PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceDeviceGeneratedCommandsPropertiesNV& operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & + operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) 
const = default; #else - bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) - && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) - && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) - && ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) - && ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) - && ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) - && ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) - && ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) - && ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) && + ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && + ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && + ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) && + ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && + ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) && + ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) && + ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) && + ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment ); } - bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -47585,76 +53715,85 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; - void* pNext = {}; - uint32_t maxGraphicsShaderGroupCount = {}; - uint32_t maxIndirectSequenceCount = {}; - uint32_t maxIndirectCommandsTokenCount = {}; - uint32_t maxIndirectCommandsStreamCount = {}; - uint32_t maxIndirectCommandsTokenOffset = {}; - uint32_t maxIndirectCommandsStreamStride = {}; - uint32_t minSequencesCountBufferOffsetAlignment = {}; - uint32_t minSequencesIndexBufferOffsetAlignment = {}; - uint32_t minIndirectCommandsBufferOffsetAlignment = {}; + void * pNext = {}; + uint32_t maxGraphicsShaderGroupCount = {}; + uint32_t maxIndirectSequenceCount = {}; + uint32_t maxIndirectCommandsTokenCount = {}; + uint32_t maxIndirectCommandsStreamCount = {}; + uint32_t maxIndirectCommandsTokenOffset = {}; + uint32_t maxIndirectCommandsStreamStride = {}; + uint32_t minSequencesCountBufferOffsetAlignment = {}; + uint32_t minSequencesIndexBufferOffsetAlignment = {}; + uint32_t minIndirectCommandsBufferOffsetAlignment = {}; }; - static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+  static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) ==
+                   sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value,
+                 "struct wrapper is not a standard layout!" );
 
   struct PhysicalDeviceDiagnosticsConfigFeaturesNV
   {
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(
+      VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {} ) VULKAN_HPP_NOEXCEPT
       : diagnosticsConfig( diagnosticsConfig_ )
     {}
 
-    PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceDiagnosticsConfigFeaturesNV &
+      operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) - offsetof( PhysicalDeviceDiagnosticsConfigFeaturesNV, pNext ) );
+      memcpy( &pNext,
+              &rhs.pNext,
+              sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) -
+                offsetof( PhysicalDeviceDiagnosticsConfigFeaturesNV, pNext ) );
       return *this;
     }
 
-    PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
     {
       *this = rhs;
     }
 
-    PhysicalDeviceDiagnosticsConfigFeaturesNV& operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceDiagnosticsConfigFeaturesNV &
+      operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs );
       return *this;
     }
 
-    PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceDiagnosticsConfigFeaturesNV &
+      setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT
     {
       diagnosticsConfig = diagnosticsConfig_;
       return *this;
     }
 
-    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV *>( this );
     }
 
     operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV *>( this );
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( diagnosticsConfig == rhs.diagnosticsConfig );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig );
} - bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -47662,56 +53801,64 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {}; }; - static_assert( sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) == + sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceDiscardRectanglePropertiesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT : maxDiscardRectangles( maxDiscardRectangles_ ) {} - PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDiscardRectanglePropertiesEXT & + operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) - offsetof( PhysicalDeviceDiscardRectanglePropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) - + offsetof( PhysicalDeviceDiscardRectanglePropertiesEXT, pNext ) ); return *this; } - PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDiscardRectanglePropertiesEXT & + operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( 
PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); } - bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -47719,18 +53866,22 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; - void* pNext = {}; - uint32_t maxDiscardRectangles = {}; + void * pNext = {}; + uint32_t maxDiscardRectangles = {}; }; - static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == + sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceDriverProperties { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, - std::array const& driverName_ = {}, - std::array const& driverInfo_ = {}, - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( + VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, + std::array const & driverName_ = {}, + std::array const & driverInfo_ = {}, + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT : driverID( driverID_ ) , driverName( driverName_ ) , driverInfo( driverInfo_ ) @@ -47739,7 +53890,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDriverProperties ) - offsetof( PhysicalDeviceDriverProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceDriverProperties ) - offsetof( PhysicalDeviceDriverProperties, pNext ) ); return *this; } @@ -47748,108 +53901,112 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceDriverProperties& operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceDriverProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceDriverProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( PhysicalDeviceDriverProperties const & ) const = default; #else - bool operator==( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( driverID == rhs.driverID ) - && ( driverName == rhs.driverName ) - && ( driverInfo == rhs.driverInfo ) - && ( conformanceVersion == rhs.conformanceVersion ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && + ( driverName == rhs.driverName ) && ( driverInfo == rhs.driverInfo ) && + ( conformanceVersion == rhs.conformanceVersion ); } - bool operator!=( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; }; - static_assert( sizeof( PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
 
   struct PhysicalDeviceExclusiveScissorFeaturesNV
   {
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} ) VULKAN_HPP_NOEXCEPT
-      : exclusiveScissor( exclusiveScissor_ )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} )
+      VULKAN_HPP_NOEXCEPT : exclusiveScissor( exclusiveScissor_ )
     {}
 
-    PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceExclusiveScissorFeaturesNV &
+      operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) - offsetof( PhysicalDeviceExclusiveScissorFeaturesNV, pNext ) );
+      memcpy( &pNext,
+              &rhs.pNext,
+              sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) -
+                offsetof( PhysicalDeviceExclusiveScissorFeaturesNV, pNext ) );
       return *this;
     }
 
-    PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
     {
       *this = rhs;
     }
 
-    PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceExclusiveScissorFeaturesNV &
+      operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs );
       return *this;
     }
 
-    PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceExclusiveScissorFeaturesNV &
+      setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
     {
       exclusiveScissor = exclusiveScissor_;
       return *this;
     }
 
-    operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV *>( this );
    }
 
     operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV *>( this );
     }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( exclusiveScissor == rhs.exclusiveScissor );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissor == rhs.exclusiveScissor );
     }
 
-    bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -47857,17 +54014,22 @@ namespace VULKAN_HPP_NAMESPACE
public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {}; }; - static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == + sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceExternalBufferInfo { - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , usage( usage_ ) , handleType( handleType_ ) @@ -47875,7 +54037,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalBufferInfo ) - offsetof( PhysicalDeviceExternalBufferInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceExternalBufferInfo ) - offsetof( PhysicalDeviceExternalBufferInfo, pNext ) ); return *this; } @@ -47884,13 +54048,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExternalBufferInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -47908,59 +54072,64 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalBufferInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - operator VkPhysicalDeviceExternalBufferInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceExternalBufferInfo const& ) const = default; +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default; #else - bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( usage == rhs.usage ) - && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && + ( handleType == rhs.handleType ); } - bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; - static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceExternalFenceInfo { - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT : handleType( handleType_ ) {} PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalFenceInfo ) - offsetof( PhysicalDeviceExternalFenceInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceExternalFenceInfo ) - offsetof( PhysicalDeviceExternalFenceInfo, pNext ) ); return *this; } @@ -47969,67 +54138,75 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExternalFenceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalFenceInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - operator VkPhysicalDeviceExternalFenceInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceExternalFenceInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default; #else - bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); } - bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + 
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; }; - static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceExternalImageFormatInfo { - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT : handleType( handleType_ ) {} - PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalImageFormatInfo & + operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalImageFormatInfo ) - offsetof( PhysicalDeviceExternalImageFormatInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceExternalImageFormatInfo ) - + offsetof( PhysicalDeviceExternalImageFormatInfo, pNext ) ); return *this; } @@ -48038,45 +54215,45 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalImageFormatInfo & + operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExternalImageFormatInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalImageFormatInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - operator VkPhysicalDeviceExternalImageFormatInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceExternalImageFormatInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default; #else - bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( 
handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); } - bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -48084,56 +54261,65 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; - static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceExternalMemoryHostPropertiesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( + VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ ) {} - PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalMemoryHostPropertiesEXT & + operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) - offsetof( PhysicalDeviceExternalMemoryHostPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) - + offsetof( PhysicalDeviceExternalMemoryHostPropertiesEXT, pNext ) ); return *this; } - PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalMemoryHostPropertiesEXT & + operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); } - bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -48141,21 +54327,29 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {}; }; - static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == + sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceExternalSemaphoreInfo { - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT : handleType( handleType_ ) {} - PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalSemaphoreInfo & + operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalSemaphoreInfo ) - offsetof( PhysicalDeviceExternalSemaphoreInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceExternalSemaphoreInfo ) - offsetof( PhysicalDeviceExternalSemaphoreInfo, pNext ) ); return *this; } @@ -48164,45 +54358,45 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalSemaphoreInfo & + operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExternalSemaphoreInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalSemaphoreInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - operator VkPhysicalDeviceExternalSemaphoreInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default; #else - bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); } - bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -48210,15 +54404,19 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits 
handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; }; - static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceFeatures2 { - VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT : features( features_ ) {} @@ -48233,77 +54431,80 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceFeatures2 & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures2 & + setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT { features = features_; return *this; } - operator VkPhysicalDeviceFeatures2 const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFeatures2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default; #else - bool operator==( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( features == rhs.features ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features ); } - bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2; + void * pNext = {}; VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {}; }; - static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" 
); + static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PhysicalDeviceFloatControlsProperties { - VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {} ) VULKAN_HPP_NOEXCEPT : denormBehaviorIndependence( denormBehaviorIndependence_ ) , roundingModeIndependence( roundingModeIndependence_ ) , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) @@ -48323,9 +54524,13 @@ namespace VULKAN_HPP_NAMESPACE , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) {} - PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) 
VULKAN_HPP_NOEXCEPT + PhysicalDeviceFloatControlsProperties & + operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFloatControlsProperties ) - offsetof( PhysicalDeviceFloatControlsProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceFloatControlsProperties ) - + offsetof( PhysicalDeviceFloatControlsProperties, pNext ) ); return *this; } @@ -48334,128 +54539,137 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceFloatControlsProperties& operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFloatControlsProperties & + operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceFloatControlsProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFloatControlsProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default; #else - bool operator==( PhysicalDeviceFloatControlsProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) - && ( roundingModeIndependence == rhs.roundingModeIndependence ) - && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) - && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) - && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) - && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) - && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) - && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) - && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) - && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) - && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) - && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) - && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) - && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) - && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) - && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) - && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && + ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == 
rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && + ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && + ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && + ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && + ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && + ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ); } - bool operator!=( PhysicalDeviceFloatControlsProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 
shaderRoundingModeRTEFloat32 = {};
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
   };
-  static_assert( sizeof( PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceFloatControlsProperties>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFloatControlsProperties>::value,
+                 "struct wrapper is not a standard layout!" );

   struct PhysicalDeviceFragmentDensityMapFeaturesEXT
   {
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {},
-                                                                      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {},
-                                                                      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT
       : fragmentDensityMap( fragmentDensityMap_ )
       , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ )
       , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
     {}

-    PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceFragmentDensityMapFeaturesEXT &
+      operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapFeaturesEXT, pNext ) );
+      memcpy( &pNext,
+              &rhs.pNext,
+              sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) -
+                offsetof( PhysicalDeviceFragmentDensityMapFeaturesEXT, pNext ) );
       return *this;
     }

-    PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
     {
       *this = rhs;
     }

-    PhysicalDeviceFragmentDensityMapFeaturesEXT& operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceFragmentDensityMapFeaturesEXT &
+      operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs );
       return *this;
     }

-    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>( this );
     }

     operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( fragmentDensityMap == rhs.fragmentDensityMap )
-          && ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic )
-          && ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMap == rhs.fragmentDensityMap ) &&
+             ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) &&
+             ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages );
     }

-    bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -48463,64 +54677,73 @@ namespace VULKAN_HPP_NAMESPACE
   public:
     const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {};
-    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {};
-    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {};
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {};
   };
-  static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) ==
+                   sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!"
); struct PhysicalDeviceFragmentDensityMapPropertiesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( + VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {} ) VULKAN_HPP_NOEXCEPT : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ) , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ) , fragmentDensityInvocations( fragmentDensityInvocations_ ) {} - PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentDensityMapPropertiesEXT & + operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) - + offsetof( PhysicalDeviceFragmentDensityMapPropertiesEXT, pNext ) ); return *this; } - PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceFragmentDensityMapPropertiesEXT& operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentDensityMapPropertiesEXT & + operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) - && ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) - && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) && + ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && + ( fragmentDensityInvocations == 
rhs.fragmentDensityInvocations ); } - bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -48528,70 +54751,81 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {}; }; - static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT : fragmentShaderBarycentric( fragmentShaderBarycentric_ ) {} - PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) - offsetof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) - + offsetof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV, pNext ) ); return *this; } - PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setFragmentShaderBarycentric( 
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT { fragmentShaderBarycentric = fragmentShaderBarycentric_; return *this; } - operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); } - bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -48599,86 +54833,98 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {}; }; - static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == + sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ) , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ) , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ ) {} - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) - offsetof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) - + offsetof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT { fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; return *this; } - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT { fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; return *this; } - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( + 
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT { fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; return *this; } - operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) - && ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) - && ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) && + ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) && + ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock ); } - bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -48686,19 +54932,23 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {}; }; - static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceGroupProperties { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {}, - std::array const& physicalDevices_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( + uint32_t physicalDeviceCount_ = {}, + std::array const & physicalDevices_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT : physicalDeviceCount( physicalDeviceCount_ ) , physicalDevices( physicalDevices_ ) , subsetAllocation( subsetAllocation_ ) @@ -48706,7 +54956,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceGroupProperties ) - offsetof( PhysicalDeviceGroupProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceGroupProperties ) - offsetof( PhysicalDeviceGroupProperties, pNext ) ); return *this; } @@ -48715,59 +54967,64 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceGroupProperties& operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceGroupProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceGroupProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default; #else - bool operator==( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( physicalDeviceCount == rhs.physicalDeviceCount ) - && ( physicalDevices == rhs.physicalDevices ) - && ( subsetAllocation == rhs.subsetAllocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && + ( physicalDevices == rhs.physicalDevices ) && ( subsetAllocation == rhs.subsetAllocation ); } - bool operator!=( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties; - void* pNext = {}; - uint32_t physicalDeviceCount = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D physicalDevices = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties; + void * pNext = {}; + uint32_t physicalDeviceCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D + physicalDevices = {}; VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {}; }; - static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceHostQueryResetFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT : hostQueryReset( hostQueryReset_ ) {} - PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceHostQueryResetFeatures & + operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceHostQueryResetFeatures ) - offsetof( PhysicalDeviceHostQueryResetFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceHostQueryResetFeatures ) - + offsetof( PhysicalDeviceHostQueryResetFeatures, pNext ) ); return *this; } @@ -48776,65 +55033,68 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceHostQueryResetFeatures& operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceHostQueryResetFeatures & + operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceHostQueryResetFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceHostQueryResetFeatures & + setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT { hostQueryReset = hostQueryReset_; return *this; } - operator VkPhysicalDeviceHostQueryResetFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceHostQueryResetFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceHostQueryResetFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( hostQueryReset == rhs.hostQueryReset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset ); } - bool operator!=( PhysicalDeviceHostQueryResetFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceHostQueryResetFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; }; - static_assert( sizeof( PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceIDProperties { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( std::array const& deviceUUID_ = {}, - std::array const& driverUUID_ = {}, - std::array const& deviceLUID_ = {}, - uint32_t deviceNodeMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceIDProperties( std::array const & deviceUUID_ = {}, + std::array const & driverUUID_ = {}, + std::array const & deviceLUID_ = {}, + uint32_t deviceNodeMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT : deviceUUID( deviceUUID_ ) , driverUUID( driverUUID_ ) , deviceLUID( deviceLUID_ ) @@ -48844,7 +55104,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceIDProperties ) - offsetof( PhysicalDeviceIDProperties, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( PhysicalDeviceIDProperties ) - offsetof( PhysicalDeviceIDProperties, pNext ) ); return *this; } @@ -48853,137 +55114,143 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceIDProperties& operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceIDProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceIDProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIDProperties const & ) const = default; #else - bool operator==( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceUUID == rhs.deviceUUID ) - && ( driverUUID == rhs.driverUUID ) - && ( deviceLUID == rhs.deviceLUID ) - && ( deviceNodeMask == rhs.deviceNodeMask ) - && ( deviceLUIDValid == rhs.deviceLUIDValid ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && + ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) && + ( deviceNodeMask == rhs.deviceNodeMask 
) && ( deviceLUIDValid == rhs.deviceLUIDValid ); } - bool operator!=( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties; + void * pNext = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; - uint32_t deviceNodeMask = {}; - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + uint32_t deviceNodeMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; }; - static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceImageDrmFormatModifierInfoEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( + uint64_t drmFormatModifier_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT : drmFormatModifier( drmFormatModifier_ ) , sharingMode( sharingMode_ ) , queueFamilyIndexCount( queueFamilyIndexCount_ ) , pQueueFamilyIndices( pQueueFamilyIndices_ ) {} - PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageDrmFormatModifierInfoEXT & + operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) - offsetof( PhysicalDeviceImageDrmFormatModifierInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) - + offsetof( PhysicalDeviceImageDrmFormatModifierInfoEXT, pNext ) ); return *this; } - PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageDrmFormatModifierInfoEXT & + operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageDrmFormatModifierInfoEXT & 
setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT { drmFormatModifier = drmFormatModifier_; return *this; } - PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT { sharingMode = sharingMode_; return *this; } - PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = queueFamilyIndexCount_; return *this; } - PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; } - operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifier == rhs.drmFormatModifier ) - && ( sharingMode == rhs.sharingMode ) - && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) - && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && + ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); } - bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -48991,22 +55258,26 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; - const void* pNext = {}; - uint64_t drmFormatModifier = {}; - VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; - uint32_t queueFamilyIndexCount = {}; - const uint32_t* pQueueFamilyIndices = {}; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + VULKAN_HPP_NAMESPACE::SharingMode 
sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; }; - static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == + sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceImageFormatInfo2 { - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT : format( format_ ) , type( type_ ) , tiling( tiling_ ) @@ -49016,7 +55287,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImageFormatInfo2 ) - offsetof( PhysicalDeviceImageFormatInfo2, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceImageFormatInfo2 ) - offsetof( PhysicalDeviceImageFormatInfo2, pNext ) ); return *this; } @@ -49025,13 +55298,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -49067,173 +55340,186 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPhysicalDeviceImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceImageFormatInfo2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default; #else - bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceImageFormatInfo2 const & 
rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( type == rhs.type ) - && ( tiling == rhs.tiling ) - && ( usage == rhs.usage ) - && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && + ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( flags == rhs.flags ); } - bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; - VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; }; - static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceImageViewImageFormatInfoEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = + VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT : imageViewType( imageViewType_ ) {} - PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageViewImageFormatInfoEXT & + operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) - offsetof( PhysicalDeviceImageViewImageFormatInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) - + offsetof( PhysicalDeviceImageViewImageFormatInfoEXT, pNext ) ); return *this; } - PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceImageViewImageFormatInfoEXT& operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageViewImageFormatInfoEXT & + operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageViewImageFormatInfoEXT & + setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT { imageViewType = imageViewType_; return *this; } - operator VkPhysicalDeviceImageViewImageFormatInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( imageViewType == rhs.imageViewType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType ); } - bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } 
#endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; }; - static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == + sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceImagelessFramebufferFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT : imagelessFramebuffer( imagelessFramebuffer_ ) {} - PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImagelessFramebufferFeatures & + operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImagelessFramebufferFeatures ) - offsetof( PhysicalDeviceImagelessFramebufferFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceImagelessFramebufferFeatures ) - + offsetof( PhysicalDeviceImagelessFramebufferFeatures, pNext ) ); return *this; } - PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceImagelessFramebufferFeatures& operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImagelessFramebufferFeatures & + operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceImagelessFramebufferFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceImagelessFramebufferFeatures & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImagelessFramebufferFeatures & + setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT { imagelessFramebuffer = imagelessFramebuffer_; return *this; } - operator VkPhysicalDeviceImagelessFramebufferFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT { - 
return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceImagelessFramebufferFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( imagelessFramebuffer == rhs.imagelessFramebuffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ); } - bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -49241,21 +55527,29 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; }; - static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeatures ) == + sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceIndexTypeUint8FeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT : indexTypeUint8( indexTypeUint8_ ) {} - PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceIndexTypeUint8FeaturesEXT & + operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) - offsetof( PhysicalDeviceIndexTypeUint8FeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) - + offsetof( PhysicalDeviceIndexTypeUint8FeaturesEXT, pNext ) ); return *this; } @@ -49264,123 +55558,134 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceIndexTypeUint8FeaturesEXT& operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceIndexTypeUint8FeaturesEXT & + operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceIndexTypeUint8FeaturesEXT & + setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT { indexTypeUint8 = indexTypeUint8_; return *this; } - operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( indexTypeUint8 == rhs.indexTypeUint8 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indexTypeUint8 == rhs.indexTypeUint8 ); } - bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; + void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; }; - static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == + sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceInlineUniformBlockFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT : inlineUniformBlock( inlineUniformBlock_ ) , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) {} - PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceInlineUniformBlockFeaturesEXT & + operator=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) - + offsetof( PhysicalDeviceInlineUniformBlockFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceInlineUniformBlockFeaturesEXT& operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceInlineUniformBlockFeaturesEXT & + operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceInlineUniformBlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceInlineUniformBlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceInlineUniformBlockFeaturesEXT & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceInlineUniformBlockFeaturesEXT & + setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT { inlineUniformBlock = inlineUniformBlock_; return *this; } - PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingInlineUniformBlockUpdateAfterBind = 
descriptorBindingInlineUniformBlockUpdateAfterBind_; return *this; } - operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceInlineUniformBlockFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( inlineUniformBlock == rhs.inlineUniformBlock ) - && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) && + ( descriptorBindingInlineUniformBlockUpdateAfterBind == + rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); } - bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -49388,69 +55693,81 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; }; - static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == + sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceInlineUniformBlockPropertiesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( uint32_t maxInlineUniformBlockSize_ = {}, - uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, - uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( + uint32_t maxInlineUniformBlockSize_ = {}, + uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {} ) VULKAN_HPP_NOEXCEPT : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) - , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( + maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) {} - PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceInlineUniformBlockPropertiesEXT & + operator=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) - + offsetof( PhysicalDeviceInlineUniformBlockPropertiesEXT, pNext ) ); return *this; } - PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceInlineUniformBlockPropertiesEXT& operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceInlineUniformBlockPropertiesEXT & + operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceInlineUniformBlockPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( 
PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) - && ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) - && ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) - && ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) - && ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && + ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && + ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == + rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && + ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && + ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == + rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); } - bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -49458,124 +55775,128 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT; - void* pNext = {}; - uint32_t maxInlineUniformBlockSize = {}; - uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; - uint32_t maxDescriptorSetInlineUniformBlocks = {}; - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; + void * pNext = {}; + uint32_t maxInlineUniformBlockSize = {}; + uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxDescriptorSetInlineUniformBlocks = {}; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; }; - static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == + sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceLimits { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {}, - uint32_t maxImageDimension2D_ = {}, - uint32_t maxImageDimension3D_ = {}, - uint32_t maxImageDimensionCube_ = {}, - uint32_t maxImageArrayLayers_ = {}, - uint32_t maxTexelBufferElements_ = {}, - uint32_t maxUniformBufferRange_ = {}, - uint32_t maxStorageBufferRange_ = {}, - uint32_t maxPushConstantsSize_ = {}, - uint32_t maxMemoryAllocationCount_ = {}, - uint32_t maxSamplerAllocationCount_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, - uint32_t maxBoundDescriptorSets_ = {}, - uint32_t maxPerStageDescriptorSamplers_ = {}, - uint32_t maxPerStageDescriptorUniformBuffers_ = {}, - uint32_t maxPerStageDescriptorStorageBuffers_ = {}, - uint32_t maxPerStageDescriptorSampledImages_ = {}, - uint32_t maxPerStageDescriptorStorageImages_ = {}, - uint32_t maxPerStageDescriptorInputAttachments_ = {}, - uint32_t maxPerStageResources_ = {}, - uint32_t maxDescriptorSetSamplers_ = {}, - uint32_t maxDescriptorSetUniformBuffers_ = {}, - uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, - uint32_t maxDescriptorSetStorageBuffers_ = {}, - uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, - uint32_t maxDescriptorSetSampledImages_ = {}, - uint32_t maxDescriptorSetStorageImages_ = {}, - uint32_t maxDescriptorSetInputAttachments_ = {}, - uint32_t maxVertexInputAttributes_ = {}, - uint32_t maxVertexInputBindings_ = {}, - uint32_t maxVertexInputAttributeOffset_ = {}, - uint32_t maxVertexInputBindingStride_ = {}, - uint32_t maxVertexOutputComponents_ = {}, - uint32_t maxTessellationGenerationLevel_ = {}, - uint32_t maxTessellationPatchSize_ = {}, - uint32_t maxTessellationControlPerVertexInputComponents_ = {}, - uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, - uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, - uint32_t maxTessellationControlTotalOutputComponents_ = {}, - uint32_t maxTessellationEvaluationInputComponents_ = {}, - uint32_t maxTessellationEvaluationOutputComponents_ = {}, - uint32_t maxGeometryShaderInvocations_ = {}, - uint32_t maxGeometryInputComponents_ = {}, - uint32_t maxGeometryOutputComponents_ = {}, - uint32_t maxGeometryOutputVertices_ = {}, - uint32_t maxGeometryTotalOutputComponents_ = {}, - uint32_t maxFragmentInputComponents_ = {}, - uint32_t maxFragmentOutputAttachments_ = {}, - uint32_t maxFragmentDualSrcAttachments_ = {}, - uint32_t maxFragmentCombinedOutputResources_ = {}, - uint32_t maxComputeSharedMemorySize_ = {}, - std::array const& maxComputeWorkGroupCount_ = {}, - uint32_t maxComputeWorkGroupInvocations_ = {}, - std::array const& maxComputeWorkGroupSize_ = {}, - uint32_t subPixelPrecisionBits_ = {}, - uint32_t subTexelPrecisionBits_ = {}, - uint32_t mipmapPrecisionBits_ = {}, - uint32_t maxDrawIndexedIndexValue_ = {}, - uint32_t maxDrawIndirectCount_ = {}, - float maxSamplerLodBias_ = {}, - float maxSamplerAnisotropy_ = {}, - uint32_t maxViewports_ = {}, - std::array const& maxViewportDimensions_ = {}, - std::array const& viewportBoundsRange_ = {}, - uint32_t viewportSubPixelBits_ = {}, - size_t minMemoryMapAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, - int32_t minTexelOffset_ = {}, - uint32_t maxTexelOffset_ = {}, - int32_t minTexelGatherOffset_ = 
{}, - uint32_t maxTexelGatherOffset_ = {}, - float minInterpolationOffset_ = {}, - float maxInterpolationOffset_ = {}, - uint32_t subPixelInterpolationOffsetBits_ = {}, - uint32_t maxFramebufferWidth_ = {}, - uint32_t maxFramebufferHeight_ = {}, - uint32_t maxFramebufferLayers_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, - uint32_t maxColorAttachments_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, - uint32_t maxSampleMaskWords_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, - float timestampPeriod_ = {}, - uint32_t maxClipDistances_ = {}, - uint32_t maxCullDistances_ = {}, - uint32_t maxCombinedClipAndCullDistances_ = {}, - uint32_t discreteQueuePriorities_ = {}, - std::array const& pointSizeRange_ = {}, - std::array const& lineWidthRange_ = {}, - float pointSizeGranularity_ = {}, - float lineWidthGranularity_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {}, + uint32_t maxImageDimension2D_ = {}, + uint32_t maxImageDimension3D_ = {}, + uint32_t maxImageDimensionCube_ = {}, + uint32_t maxImageArrayLayers_ = {}, + uint32_t maxTexelBufferElements_ = {}, + uint32_t maxUniformBufferRange_ = {}, + uint32_t maxStorageBufferRange_ = {}, + uint32_t maxPushConstantsSize_ = {}, + uint32_t maxMemoryAllocationCount_ = {}, + uint32_t maxSamplerAllocationCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, + uint32_t maxBoundDescriptorSets_ = {}, + uint32_t maxPerStageDescriptorSamplers_ = {}, + uint32_t maxPerStageDescriptorUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorSampledImages_ = {}, + uint32_t maxPerStageDescriptorStorageImages_ = {}, + uint32_t maxPerStageDescriptorInputAttachments_ = {}, + uint32_t maxPerStageResources_ = {}, + uint32_t maxDescriptorSetSamplers_ = {}, + uint32_t maxDescriptorSetUniformBuffers_ = {}, + uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetStorageBuffers_ = {}, + uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetSampledImages_ = {}, + uint32_t maxDescriptorSetStorageImages_ = {}, + uint32_t maxDescriptorSetInputAttachments_ = {}, + uint32_t maxVertexInputAttributes_ = {}, + uint32_t maxVertexInputBindings_ = {}, + uint32_t maxVertexInputAttributeOffset_ = {}, + uint32_t maxVertexInputBindingStride_ = {}, + uint32_t maxVertexOutputComponents_ = {}, + uint32_t maxTessellationGenerationLevel_ = {}, + uint32_t maxTessellationPatchSize_ = 
{}, + uint32_t maxTessellationControlPerVertexInputComponents_ = {}, + uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, + uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, + uint32_t maxTessellationControlTotalOutputComponents_ = {}, + uint32_t maxTessellationEvaluationInputComponents_ = {}, + uint32_t maxTessellationEvaluationOutputComponents_ = {}, + uint32_t maxGeometryShaderInvocations_ = {}, + uint32_t maxGeometryInputComponents_ = {}, + uint32_t maxGeometryOutputComponents_ = {}, + uint32_t maxGeometryOutputVertices_ = {}, + uint32_t maxGeometryTotalOutputComponents_ = {}, + uint32_t maxFragmentInputComponents_ = {}, + uint32_t maxFragmentOutputAttachments_ = {}, + uint32_t maxFragmentDualSrcAttachments_ = {}, + uint32_t maxFragmentCombinedOutputResources_ = {}, + uint32_t maxComputeSharedMemorySize_ = {}, + std::array const & maxComputeWorkGroupCount_ = {}, + uint32_t maxComputeWorkGroupInvocations_ = {}, + std::array const & maxComputeWorkGroupSize_ = {}, + uint32_t subPixelPrecisionBits_ = {}, + uint32_t subTexelPrecisionBits_ = {}, + uint32_t mipmapPrecisionBits_ = {}, + uint32_t maxDrawIndexedIndexValue_ = {}, + uint32_t maxDrawIndirectCount_ = {}, + float maxSamplerLodBias_ = {}, + float maxSamplerAnisotropy_ = {}, + uint32_t maxViewports_ = {}, + std::array const & maxViewportDimensions_ = {}, + std::array const & viewportBoundsRange_ = {}, + uint32_t viewportSubPixelBits_ = {}, + size_t minMemoryMapAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, + int32_t minTexelOffset_ = {}, + uint32_t maxTexelOffset_ = {}, + int32_t minTexelGatherOffset_ = {}, + uint32_t maxTexelGatherOffset_ = {}, + float minInterpolationOffset_ = {}, + float maxInterpolationOffset_ = {}, + uint32_t subPixelInterpolationOffsetBits_ = {}, + uint32_t maxFramebufferWidth_ = {}, + uint32_t maxFramebufferHeight_ = {}, + uint32_t maxFramebufferLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, + uint32_t maxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, + uint32_t maxSampleMaskWords_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, + float timestampPeriod_ = {}, + uint32_t maxClipDistances_ = {}, + uint32_t maxCullDistances_ = {}, + uint32_t maxCombinedClipAndCullDistances_ = {}, + uint32_t discreteQueuePriorities_ = {}, + std::array const & pointSizeRange_ = {}, + std::array const & lineWidthRange_ = {}, + float pointSizeGranularity_ = {}, + float lineWidthGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, + 
VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT : maxImageDimension1D( maxImageDimension1D_ ) , maxImageDimension2D( maxImageDimension2D_ ) , maxImageDimension3D( maxImageDimension3D_ ) @@ -49689,260 +56010,255 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceLimits& operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceLimits const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceLimits const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLimits const & ) const = default; #else - bool operator==( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( maxImageDimension1D == rhs.maxImageDimension1D ) - && ( maxImageDimension2D == rhs.maxImageDimension2D ) - && ( maxImageDimension3D == rhs.maxImageDimension3D ) - && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) - && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) - && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) - && ( maxUniformBufferRange == rhs.maxUniformBufferRange ) - && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) - && ( maxPushConstantsSize == rhs.maxPushConstantsSize ) - && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) - && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) - && ( bufferImageGranularity == rhs.bufferImageGranularity ) - && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) - && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) - && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) - && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) - && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) - && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) - && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) - && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) - && ( maxPerStageResources == rhs.maxPerStageResources ) - && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) - && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) - && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) - && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) - && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) - && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) - && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) - && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) - && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) - && ( maxVertexInputBindings == rhs.maxVertexInputBindings ) - && ( maxVertexInputAttributeOffset == 
rhs.maxVertexInputAttributeOffset ) - && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) - && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) - && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) - && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) - && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) - && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) - && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) - && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) - && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) - && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) - && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) - && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) - && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) - && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) - && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) - && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) - && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) - && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) - && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) - && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) - && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) - && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) - && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) - && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) - && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) - && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) - && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) - && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) - && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) - && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) - && ( maxViewports == rhs.maxViewports ) - && ( maxViewportDimensions == rhs.maxViewportDimensions ) - && ( viewportBoundsRange == rhs.viewportBoundsRange ) - && ( viewportSubPixelBits == rhs.viewportSubPixelBits ) - && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) - && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) - && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) - && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) - && ( minTexelOffset == rhs.minTexelOffset ) - && ( maxTexelOffset == rhs.maxTexelOffset ) - && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) - && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) - && ( minInterpolationOffset == rhs.minInterpolationOffset ) - && ( maxInterpolationOffset == rhs.maxInterpolationOffset ) - && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) - && ( maxFramebufferWidth == rhs.maxFramebufferWidth ) - && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) - && ( maxFramebufferLayers == rhs.maxFramebufferLayers ) - && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) - && ( framebufferDepthSampleCounts == 
rhs.framebufferDepthSampleCounts ) - && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) - && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) - && ( maxColorAttachments == rhs.maxColorAttachments ) - && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) - && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) - && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) - && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) - && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) - && ( maxSampleMaskWords == rhs.maxSampleMaskWords ) - && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) - && ( timestampPeriod == rhs.timestampPeriod ) - && ( maxClipDistances == rhs.maxClipDistances ) - && ( maxCullDistances == rhs.maxCullDistances ) - && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) - && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) - && ( pointSizeRange == rhs.pointSizeRange ) - && ( lineWidthRange == rhs.lineWidthRange ) - && ( pointSizeGranularity == rhs.pointSizeGranularity ) - && ( lineWidthGranularity == rhs.lineWidthGranularity ) - && ( strictLines == rhs.strictLines ) - && ( standardSampleLocations == rhs.standardSampleLocations ) - && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) - && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) - && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); + return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) && + ( maxImageDimension3D == rhs.maxImageDimension3D ) && + ( maxImageDimensionCube == rhs.maxImageDimensionCube ) && + ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && + ( maxTexelBufferElements == rhs.maxTexelBufferElements ) && + ( maxUniformBufferRange == rhs.maxUniformBufferRange ) && + ( maxStorageBufferRange == rhs.maxStorageBufferRange ) && + ( maxPushConstantsSize == rhs.maxPushConstantsSize ) && + ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) && + ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) && + ( bufferImageGranularity == rhs.bufferImageGranularity ) && + ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) && + ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) && + ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) && + ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) && + ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) && + ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) && + ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) && + ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) && + ( maxPerStageResources == rhs.maxPerStageResources ) && + ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) && + ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) && + ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) && + ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) && + ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) && + ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) && + ( maxDescriptorSetStorageImages == 
rhs.maxDescriptorSetStorageImages ) && + ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) && + ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) && + ( maxVertexInputBindings == rhs.maxVertexInputBindings ) && + ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) && + ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) && + ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) && + ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) && + ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) && + ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) && + ( maxTessellationControlPerVertexOutputComponents == + rhs.maxTessellationControlPerVertexOutputComponents ) && + ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) && + ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) && + ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) && + ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) && + ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) && + ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) && + ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) && + ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) && + ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) && + ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) && + ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) && + ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) && + ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) && + ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) && + ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) && + ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) && + ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) && + ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) && + ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) && + ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) && + ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) && + ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) && + ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) && + ( maxViewportDimensions == rhs.maxViewportDimensions ) && + ( viewportBoundsRange == rhs.viewportBoundsRange ) && + ( viewportSubPixelBits == rhs.viewportSubPixelBits ) && + ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) && + ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) && + ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) && + ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) && + ( minTexelOffset == rhs.minTexelOffset ) && ( maxTexelOffset == rhs.maxTexelOffset ) && + ( minTexelGatherOffset == rhs.minTexelGatherOffset ) && + ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) && + ( minInterpolationOffset == rhs.minInterpolationOffset ) && + ( maxInterpolationOffset == rhs.maxInterpolationOffset ) && + ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) && + ( maxFramebufferWidth == 
rhs.maxFramebufferWidth ) && + ( maxFramebufferHeight == rhs.maxFramebufferHeight ) && + ( maxFramebufferLayers == rhs.maxFramebufferLayers ) && + ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) && + ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) && + ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) && + ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) && + ( maxColorAttachments == rhs.maxColorAttachments ) && + ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) && + ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) && + ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) && + ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) && + ( storageImageSampleCounts == rhs.storageImageSampleCounts ) && + ( maxSampleMaskWords == rhs.maxSampleMaskWords ) && + ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) && + ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) && + ( maxCullDistances == rhs.maxCullDistances ) && + ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) && + ( discreteQueuePriorities == rhs.discreteQueuePriorities ) && ( pointSizeRange == rhs.pointSizeRange ) && + ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) && + ( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) && + ( standardSampleLocations == rhs.standardSampleLocations ) && + ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) && + ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) && + ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); } - bool operator!=( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t maxImageDimension1D = {}; - uint32_t maxImageDimension2D = {}; - uint32_t maxImageDimension3D = {}; - uint32_t maxImageDimensionCube = {}; - uint32_t maxImageArrayLayers = {}; - uint32_t maxTexelBufferElements = {}; - uint32_t maxUniformBufferRange = {}; - uint32_t maxStorageBufferRange = {}; - uint32_t maxPushConstantsSize = {}; - uint32_t maxMemoryAllocationCount = {}; - uint32_t maxSamplerAllocationCount = {}; - VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {}; - uint32_t maxBoundDescriptorSets = {}; - uint32_t maxPerStageDescriptorSamplers = {}; - uint32_t maxPerStageDescriptorUniformBuffers = {}; - uint32_t maxPerStageDescriptorStorageBuffers = {}; - uint32_t maxPerStageDescriptorSampledImages = {}; - uint32_t maxPerStageDescriptorStorageImages = {}; - uint32_t maxPerStageDescriptorInputAttachments = {}; - uint32_t maxPerStageResources = {}; - uint32_t maxDescriptorSetSamplers = {}; - uint32_t maxDescriptorSetUniformBuffers = {}; - uint32_t maxDescriptorSetUniformBuffersDynamic = {}; - uint32_t maxDescriptorSetStorageBuffers = {}; - uint32_t maxDescriptorSetStorageBuffersDynamic = {}; - uint32_t maxDescriptorSetSampledImages = {}; - uint32_t maxDescriptorSetStorageImages = {}; - uint32_t maxDescriptorSetInputAttachments = {}; - uint32_t maxVertexInputAttributes = {}; - uint32_t maxVertexInputBindings = {}; - uint32_t maxVertexInputAttributeOffset = {}; - uint32_t maxVertexInputBindingStride = 
{}; - uint32_t maxVertexOutputComponents = {}; - uint32_t maxTessellationGenerationLevel = {}; - uint32_t maxTessellationPatchSize = {}; - uint32_t maxTessellationControlPerVertexInputComponents = {}; - uint32_t maxTessellationControlPerVertexOutputComponents = {}; - uint32_t maxTessellationControlPerPatchOutputComponents = {}; - uint32_t maxTessellationControlTotalOutputComponents = {}; - uint32_t maxTessellationEvaluationInputComponents = {}; - uint32_t maxTessellationEvaluationOutputComponents = {}; - uint32_t maxGeometryShaderInvocations = {}; - uint32_t maxGeometryInputComponents = {}; - uint32_t maxGeometryOutputComponents = {}; - uint32_t maxGeometryOutputVertices = {}; - uint32_t maxGeometryTotalOutputComponents = {}; - uint32_t maxFragmentInputComponents = {}; - uint32_t maxFragmentOutputAttachments = {}; - uint32_t maxFragmentDualSrcAttachments = {}; - uint32_t maxFragmentCombinedOutputResources = {}; - uint32_t maxComputeSharedMemorySize = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupCount = {}; - uint32_t maxComputeWorkGroupInvocations = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupSize = {}; - uint32_t subPixelPrecisionBits = {}; - uint32_t subTexelPrecisionBits = {}; - uint32_t mipmapPrecisionBits = {}; - uint32_t maxDrawIndexedIndexValue = {}; - uint32_t maxDrawIndirectCount = {}; - float maxSamplerLodBias = {}; - float maxSamplerAnisotropy = {}; - uint32_t maxViewports = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxViewportDimensions = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D viewportBoundsRange = {}; - uint32_t viewportSubPixelBits = {}; - size_t minMemoryMapAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {}; - int32_t minTexelOffset = {}; - uint32_t maxTexelOffset = {}; - int32_t minTexelGatherOffset = {}; - uint32_t maxTexelGatherOffset = {}; - float minInterpolationOffset = {}; - float maxInterpolationOffset = {}; - uint32_t subPixelInterpolationOffsetBits = {}; - uint32_t maxFramebufferWidth = {}; - uint32_t maxFramebufferHeight = {}; - uint32_t maxFramebufferLayers = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {}; - uint32_t maxColorAttachments = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {}; - uint32_t maxSampleMaskWords = {}; - VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {}; - float timestampPeriod = {}; - uint32_t maxClipDistances = {}; - uint32_t maxCullDistances = {}; - uint32_t maxCombinedClipAndCullDistances = {}; - uint32_t discreteQueuePriorities = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D pointSizeRange = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D lineWidthRange = {}; - float pointSizeGranularity = {}; - float lineWidthGranularity = {}; - VULKAN_HPP_NAMESPACE::Bool32 strictLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 
standardSampleLocations = {}; - VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {}; + uint32_t maxImageDimension1D = {}; + uint32_t maxImageDimension2D = {}; + uint32_t maxImageDimension3D = {}; + uint32_t maxImageDimensionCube = {}; + uint32_t maxImageArrayLayers = {}; + uint32_t maxTexelBufferElements = {}; + uint32_t maxUniformBufferRange = {}; + uint32_t maxStorageBufferRange = {}; + uint32_t maxPushConstantsSize = {}; + uint32_t maxMemoryAllocationCount = {}; + uint32_t maxSamplerAllocationCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {}; + uint32_t maxBoundDescriptorSets = {}; + uint32_t maxPerStageDescriptorSamplers = {}; + uint32_t maxPerStageDescriptorUniformBuffers = {}; + uint32_t maxPerStageDescriptorStorageBuffers = {}; + uint32_t maxPerStageDescriptorSampledImages = {}; + uint32_t maxPerStageDescriptorStorageImages = {}; + uint32_t maxPerStageDescriptorInputAttachments = {}; + uint32_t maxPerStageResources = {}; + uint32_t maxDescriptorSetSamplers = {}; + uint32_t maxDescriptorSetUniformBuffers = {}; + uint32_t maxDescriptorSetUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetStorageBuffers = {}; + uint32_t maxDescriptorSetStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetSampledImages = {}; + uint32_t maxDescriptorSetStorageImages = {}; + uint32_t maxDescriptorSetInputAttachments = {}; + uint32_t maxVertexInputAttributes = {}; + uint32_t maxVertexInputBindings = {}; + uint32_t maxVertexInputAttributeOffset = {}; + uint32_t maxVertexInputBindingStride = {}; + uint32_t maxVertexOutputComponents = {}; + uint32_t maxTessellationGenerationLevel = {}; + uint32_t maxTessellationPatchSize = {}; + uint32_t maxTessellationControlPerVertexInputComponents = {}; + uint32_t maxTessellationControlPerVertexOutputComponents = {}; + uint32_t maxTessellationControlPerPatchOutputComponents = {}; + uint32_t maxTessellationControlTotalOutputComponents = {}; + uint32_t maxTessellationEvaluationInputComponents = {}; + uint32_t maxTessellationEvaluationOutputComponents = {}; + uint32_t maxGeometryShaderInvocations = {}; + uint32_t maxGeometryInputComponents = {}; + uint32_t maxGeometryOutputComponents = {}; + uint32_t maxGeometryOutputVertices = {}; + uint32_t maxGeometryTotalOutputComponents = {}; + uint32_t maxFragmentInputComponents = {}; + uint32_t maxFragmentOutputAttachments = {}; + uint32_t maxFragmentDualSrcAttachments = {}; + uint32_t maxFragmentCombinedOutputResources = {}; + uint32_t maxComputeSharedMemorySize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupCount = {}; + uint32_t maxComputeWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupSize = {}; + uint32_t subPixelPrecisionBits = {}; + uint32_t subTexelPrecisionBits = {}; + uint32_t mipmapPrecisionBits = {}; + uint32_t maxDrawIndexedIndexValue = {}; + uint32_t maxDrawIndirectCount = {}; + float maxSamplerLodBias = {}; + float maxSamplerAnisotropy = {}; + uint32_t maxViewports = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxViewportDimensions = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D viewportBoundsRange = {}; + uint32_t viewportSubPixelBits = {}; + size_t minMemoryMapAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {}; + 
VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {}; + int32_t minTexelOffset = {}; + uint32_t maxTexelOffset = {}; + int32_t minTexelGatherOffset = {}; + uint32_t maxTexelGatherOffset = {}; + float minInterpolationOffset = {}; + float maxInterpolationOffset = {}; + uint32_t subPixelInterpolationOffsetBits = {}; + uint32_t maxFramebufferWidth = {}; + uint32_t maxFramebufferHeight = {}; + uint32_t maxFramebufferLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {}; + uint32_t maxColorAttachments = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {}; + uint32_t maxSampleMaskWords = {}; + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {}; + float timestampPeriod = {}; + uint32_t maxClipDistances = {}; + uint32_t maxCullDistances = {}; + uint32_t maxCombinedClipAndCullDistances = {}; + uint32_t discreteQueuePriorities = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pointSizeRange = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lineWidthRange = {}; + float pointSizeGranularity = {}; + float lineWidthGranularity = {}; + VULKAN_HPP_NAMESPACE::Bool32 strictLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {}; + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {}; }; - static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" ); + static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceLineRasterizationFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {} ) VULKAN_HPP_NOEXCEPT : rectangularLines( rectangularLines_ ) , bresenhamLines( bresenhamLines_ ) , smoothLines( smoothLines_ ) @@ -49951,91 +56267,100 @@ namespace VULKAN_HPP_NAMESPACE , stippledSmoothLines( stippledSmoothLines_ ) {} - PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationFeaturesEXT & + operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) - offsetof( PhysicalDeviceLineRasterizationFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) - + offsetof( PhysicalDeviceLineRasterizationFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceLineRasterizationFeaturesEXT& operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationFeaturesEXT & + operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationFeaturesEXT & + setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT { rectangularLines = rectangularLines_; return *this; } - PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationFeaturesEXT & + setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT { bresenhamLines = bresenhamLines_; return *this; } - PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationFeaturesEXT & + setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT { smoothLines = smoothLines_; return *this; } - 
PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationFeaturesEXT & + setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT { stippledRectangularLines = stippledRectangularLines_; return *this; } - PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationFeaturesEXT & + setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT { stippledBresenhamLines = stippledBresenhamLines_; return *this; } - PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationFeaturesEXT & + setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT { stippledSmoothLines = stippledSmoothLines_; return *this; } - operator VkPhysicalDeviceLineRasterizationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceLineRasterizationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( rectangularLines == rhs.rectangularLines ) - && ( bresenhamLines == rhs.bresenhamLines ) - && ( smoothLines == rhs.smoothLines ) - && ( stippledRectangularLines == rhs.stippledRectangularLines ) - && ( stippledBresenhamLines == rhs.stippledBresenhamLines ) - && ( stippledSmoothLines == rhs.stippledSmoothLines ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) && + ( bresenhamLines == rhs.bresenhamLines ) && ( smoothLines == rhs.smoothLines ) && + ( stippledRectangularLines == rhs.stippledRectangularLines ) && + ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && + ( stippledSmoothLines == rhs.stippledSmoothLines ); } - bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -50043,61 +56368,70 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; + void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; }; - static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == + sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceLineRasterizationPropertiesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ ) {} - PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationPropertiesEXT & + operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceLineRasterizationPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) - + offsetof( PhysicalDeviceLineRasterizationPropertiesEXT, pNext ) ); return *this; } - PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceLineRasterizationPropertiesEXT& operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationPropertiesEXT & + operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceLineRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceLineRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( lineSubPixelPrecisionBits == 
rhs.lineSubPixelPrecisionBits ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ); } - bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -50105,23 +56439,31 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; - void* pNext = {}; - uint32_t lineSubPixelPrecisionBits = {}; + void * pNext = {}; + uint32_t lineSubPixelPrecisionBits = {}; }; - static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == + sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceMaintenance3Properties { - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( + uint32_t maxPerSetDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT : maxPerSetDescriptors( maxPerSetDescriptors_ ) , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) {} - PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMaintenance3Properties & + operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMaintenance3Properties ) - offsetof( PhysicalDeviceMaintenance3Properties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceMaintenance3Properties ) - + offsetof( PhysicalDeviceMaintenance3Properties, pNext ) ); return *this; } @@ -50130,34 +56472,33 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceMaintenance3Properties& operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMaintenance3Properties & + operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMaintenance3Properties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceMaintenance3Properties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default; #else - bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( 
PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) - && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && + ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); } - bool operator!=( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -50165,24 +56506,31 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; - void* pNext = {}; - uint32_t maxPerSetDescriptors = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; + void * pNext = {}; + uint32_t maxPerSetDescriptors = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; }; - static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceMemoryBudgetPropertiesEXT { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array const& heapBudget_ = {}, - std::array const& heapUsage_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( + std::array const & heapBudget_ = {}, + std::array const & heapUsage_ = {} ) VULKAN_HPP_NOEXCEPT : heapBudget( heapBudget_ ) , heapUsage( heapUsage_ ) {} - PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMemoryBudgetPropertiesEXT & + operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) - offsetof( PhysicalDeviceMemoryBudgetPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) - + offsetof( PhysicalDeviceMemoryBudgetPropertiesEXT, pNext ) ); return *this; } @@ -50191,34 +56539,33 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceMemoryBudgetPropertiesEXT& operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMemoryBudgetPropertiesEXT & + operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const& ) 
const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( heapBudget == rhs.heapBudget ) - && ( heapUsage == rhs.heapUsage ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) && + ( heapUsage == rhs.heapUsage ); } - bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -50226,22 +56573,30 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; - void* pNext = {}; + void * pNext = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapBudget = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapUsage = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapUsage = {}; }; - static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == + sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceMemoryPriorityFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT : memoryPriority( memoryPriority_ ) {} - PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMemoryPriorityFeaturesEXT & + operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) - offsetof( PhysicalDeviceMemoryPriorityFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) - + offsetof( PhysicalDeviceMemoryPriorityFeaturesEXT, pNext ) ); return *this; } @@ -50250,64 +56605,68 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceMemoryPriorityFeaturesEXT& operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMemoryPriorityFeaturesEXT & + operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMemoryPriorityFeaturesEXT & + setMemoryPriority( 
VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT { memoryPriority = memoryPriority_; return *this; } - operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryPriority == rhs.memoryPriority ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryPriority == rhs.memoryPriority ); } - bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {}; }; - static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == + sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceMemoryProperties { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {}, - std::array const& memoryTypes_ = {}, - uint32_t memoryHeapCount_ = {}, - std::array const& memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( + uint32_t memoryTypeCount_ = {}, + std::array const & memoryTypes_ = {}, + uint32_t memoryHeapCount_ = {}, + std::array const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT : memoryTypeCount( memoryTypeCount_ ) , memoryTypes( memoryTypes_ ) , memoryHeapCount( memoryHeapCount_ ) @@ -50319,57 +56678,60 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceMemoryProperties& operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMemoryProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceMemoryProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default; #else - bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( memoryTypeCount == rhs.memoryTypeCount ) - && ( memoryTypes == rhs.memoryTypes ) - && ( memoryHeapCount == rhs.memoryHeapCount ) - && ( memoryHeaps == rhs.memoryHeaps ); + return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == rhs.memoryTypes ) && + ( memoryHeapCount == rhs.memoryHeapCount ) && ( memoryHeaps == rhs.memoryHeaps ); } - bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t memoryTypeCount = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryTypes = {}; - uint32_t memoryHeapCount = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryHeaps = {}; + uint32_t memoryTypeCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryTypes = {}; + uint32_t memoryHeapCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryHeaps = {}; }; - static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceMemoryProperties2 { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT : memoryProperties( memoryProperties_ ) {} PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMemoryProperties2 ) - offsetof( PhysicalDeviceMemoryProperties2, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceMemoryProperties2 ) - offsetof( PhysicalDeviceMemoryProperties2, pNext ) ); return *this; } @@ -50378,57 +56740,60 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceMemoryProperties2& operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMemoryProperties2 const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceMemoryProperties2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default; #else - bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryProperties == rhs.memoryProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties ); } - bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; + void * pNext = {}; VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {}; }; - static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceMeshShaderFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT : taskShader( taskShader_ ) , meshShader( meshShader_ ) {} PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMeshShaderFeaturesNV ) - offsetof( PhysicalDeviceMeshShaderFeaturesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceMeshShaderFeaturesNV ) - offsetof( PhysicalDeviceMeshShaderFeaturesNV, pNext ) ); return *this; } @@ -50437,13 +56802,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMeshShaderFeaturesNV & + operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceMeshShaderFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -50461,57 +56827,58 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPhysicalDeviceMeshShaderFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( taskShader == rhs.taskShader ) - && ( meshShader == rhs.meshShader ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && + ( meshShader == rhs.meshShader ); } - bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; }; - static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceMeshShaderPropertiesNV { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {}, - uint32_t maxTaskWorkGroupInvocations_ = {}, - std::array const& maxTaskWorkGroupSize_ = {}, - uint32_t maxTaskTotalMemorySize_ = {}, - uint32_t maxTaskOutputCount_ = {}, - uint32_t maxMeshWorkGroupInvocations_ = {}, - std::array const& maxMeshWorkGroupSize_ = {}, - uint32_t maxMeshTotalMemorySize_ = {}, - uint32_t maxMeshOutputVertices_ = {}, - uint32_t maxMeshOutputPrimitives_ = {}, - uint32_t maxMeshMultiviewViewCount_ = {}, - uint32_t meshOutputPerVertexGranularity_ = {}, - uint32_t meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {}, + uint32_t maxTaskWorkGroupInvocations_ = {}, + std::array const & maxTaskWorkGroupSize_ = {}, + uint32_t maxTaskTotalMemorySize_ = {}, + uint32_t maxTaskOutputCount_ = {}, + uint32_t maxMeshWorkGroupInvocations_ = {}, + std::array const & maxMeshWorkGroupSize_ = {}, + uint32_t maxMeshTotalMemorySize_ = {}, + uint32_t maxMeshOutputVertices_ = {}, + uint32_t maxMeshOutputPrimitives_ = {}, + uint32_t maxMeshMultiviewViewCount_ = {}, + uint32_t meshOutputPerVertexGranularity_ = {}, + uint32_t meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ) , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ) , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ) @@ -50527,9 +56894,13 @@ namespace VULKAN_HPP_NAMESPACE , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) {} - PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMeshShaderPropertiesNV & + operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMeshShaderPropertiesNV ) - offsetof( PhysicalDeviceMeshShaderPropertiesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceMeshShaderPropertiesNV ) - + offsetof( PhysicalDeviceMeshShaderPropertiesNV, pNext ) ); return *this; } @@ -50538,75 +56909,78 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceMeshShaderPropertiesNV& operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMeshShaderPropertiesNV & + operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMeshShaderPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + 
auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default; #else - bool operator==( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) - && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) - && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) - && ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) - && ( maxTaskOutputCount == rhs.maxTaskOutputCount ) - && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) - && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) - && ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) - && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) - && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) - && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) - && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) - && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) && + ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && + ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && + ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) && + ( maxTaskOutputCount == rhs.maxTaskOutputCount ) && + ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && + ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && + ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) && + ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && + ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && + ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && + ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && + ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ); } - bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; - void* pNext = {}; - uint32_t maxDrawMeshTasksCount = {}; - uint32_t maxTaskWorkGroupInvocations = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; - uint32_t maxTaskTotalMemorySize = {}; - uint32_t maxTaskOutputCount = {}; - uint32_t maxMeshWorkGroupInvocations = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; - uint32_t maxMeshTotalMemorySize = {}; - uint32_t maxMeshOutputVertices = {}; - uint32_t maxMeshOutputPrimitives = {}; - uint32_t maxMeshMultiviewViewCount = {}; - uint32_t meshOutputPerVertexGranularity = {}; - uint32_t meshOutputPerPrimitiveGranularity = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + void * pNext = {}; + uint32_t maxDrawMeshTasksCount = {}; + uint32_t maxTaskWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; + uint32_t maxTaskTotalMemorySize = {}; + uint32_t maxTaskOutputCount = {}; + uint32_t maxMeshWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; + uint32_t 
maxMeshTotalMemorySize = {}; + uint32_t maxMeshOutputVertices = {}; + uint32_t maxMeshOutputPrimitives = {}; + uint32_t maxMeshMultiviewViewCount = {}; + uint32_t meshOutputPerVertexGranularity = {}; + uint32_t meshOutputPerPrimitiveGranularity = {}; }; - static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceMultiviewFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( + VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT : multiview( multiview_ ) , multiviewGeometryShader( multiviewGeometryShader_ ) , multiviewTessellationShader( multiviewTessellationShader_ ) @@ -50614,7 +56988,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMultiviewFeatures ) - offsetof( PhysicalDeviceMultiviewFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceMultiviewFeatures ) - offsetof( PhysicalDeviceMultiviewFeatures, pNext ) ); return *this; } @@ -50623,13 +56999,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceMultiviewFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -50641,124 +57017,140 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMultiviewFeatures & + setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT { multiviewGeometryShader = multiviewGeometryShader_; return *this; } - PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMultiviewFeatures & + setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT { multiviewTessellationShader = multiviewTessellationShader_; return *this; } - operator VkPhysicalDeviceMultiviewFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator 
VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceMultiviewFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( multiview == rhs.multiview ) - && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) - && ( multiviewTessellationShader == rhs.multiviewTessellationShader ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) && + ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && + ( multiviewTessellationShader == rhs.multiviewTessellationShader ); } - bool operator!=( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; }; - static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( + VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT : perViewPositionAllComponents( perViewPositionAllComponents_ ) {} - PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & + operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) - offsetof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) - + offsetof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, pNext ) ); return *this; } - PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( + VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & + operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default; #else - bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( perViewPositionAllComponents == rhs.perViewPositionAllComponents ); } - bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; - void* pNext = {}; + const 
VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {}; }; - static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == + sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceMultiviewProperties { - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {}, - uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {}, + uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT : maxMultiviewViewCount( maxMultiviewViewCount_ ) , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) {} PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMultiviewProperties ) - offsetof( PhysicalDeviceMultiviewProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceMultiviewProperties ) - offsetof( PhysicalDeviceMultiviewProperties, pNext ) ); return *this; } @@ -50767,53 +57159,54 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceMultiviewProperties& operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMultiviewProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceMultiviewProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default; #else - bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) - && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && + ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ); } - bool operator!=( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; - void* pNext = {}; - uint32_t maxMultiviewViewCount = {}; - uint32_t maxMultiviewInstanceIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; + void * pNext = {}; + uint32_t maxMultiviewViewCount = {}; + uint32_t maxMultiviewInstanceIndex = {}; }; - static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDevicePCIBusInfoPropertiesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {}, - uint32_t pciBus_ = {}, - uint32_t pciDevice_ = {}, + VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {}, + uint32_t pciBus_ = {}, + uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {} ) VULKAN_HPP_NOEXCEPT : pciDomain( pciDomain_ ) , pciBus( pciBus_ ) @@ -50821,9 +57214,13 @@ namespace VULKAN_HPP_NAMESPACE , pciFunction( pciFunction_ ) {} - PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePCIBusInfoPropertiesEXT & + operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) - offsetof( PhysicalDevicePCIBusInfoPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) - + offsetof( PhysicalDevicePCIBusInfoPropertiesEXT, pNext ) ); return *this; } @@ -50832,117 +57229,124 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDevicePCIBusInfoPropertiesEXT& operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePCIBusInfoPropertiesEXT & + operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDevicePCIBusInfoPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pciDomain == rhs.pciDomain ) - && ( pciBus == rhs.pciBus ) - && ( pciDevice == rhs.pciDevice ) - && ( pciFunction == rhs.pciFunction ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == 
rhs.pciDomain ) && + ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) && ( pciFunction == rhs.pciFunction ); } - bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; - void* pNext = {}; - uint32_t pciDomain = {}; - uint32_t pciBus = {}; - uint32_t pciDevice = {}; - uint32_t pciFunction = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + void * pNext = {}; + uint32_t pciDomain = {}; + uint32_t pciBus = {}; + uint32_t pciDevice = {}; + uint32_t pciFunction = {}; }; - static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDevicePerformanceQueryFeaturesKHR { - VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {} ) VULKAN_HPP_NOEXCEPT : performanceCounterQueryPools( performanceCounterQueryPools_ ) , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ ) {} - PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePerformanceQueryFeaturesKHR & + operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) - offsetof( PhysicalDevicePerformanceQueryFeaturesKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) - + offsetof( PhysicalDevicePerformanceQueryFeaturesKHR, pNext ) ); return *this; } - PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDevicePerformanceQueryFeaturesKHR& operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePerformanceQueryFeaturesKHR & + operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 
performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePerformanceQueryFeaturesKHR & + setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT { performanceCounterQueryPools = performanceCounterQueryPools_; return *this; } - PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT { performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_; return *this; } - operator VkPhysicalDevicePerformanceQueryFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default; #else - bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) - && ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) && + ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools ); } - bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -50950,57 +57354,66 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {}; - VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {}; }; - static_assert( sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) == + sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDevicePerformanceQueryPropertiesKHR { - VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( + VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {} ) VULKAN_HPP_NOEXCEPT : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ ) {} - PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePerformanceQueryPropertiesKHR & + operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) - offsetof( PhysicalDevicePerformanceQueryPropertiesKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) - + offsetof( PhysicalDevicePerformanceQueryPropertiesKHR, pNext ) ); return *this; } - PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDevicePerformanceQueryPropertiesKHR& operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePerformanceQueryPropertiesKHR & + operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDevicePerformanceQueryPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default; #else - bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies ); } - bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -51008,159 +57421,192 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {}; }; - static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( 
VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) == + sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDevicePipelineCreationCacheControlFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {} ) VULKAN_HPP_NOEXCEPT : pipelineCreationCacheControl( pipelineCreationCacheControl_ ) {} - PhysicalDevicePipelineCreationCacheControlFeaturesEXT & operator=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & + operator=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) - offsetof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) - + offsetof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT, pNext ) ); return *this; } - PhysicalDevicePipelineCreationCacheControlFeaturesEXT( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineCreationCacheControlFeaturesEXT( + VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDevicePipelineCreationCacheControlFeaturesEXT& operator=( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & + operator=( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & + setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT { pipelineCreationCacheControl = pipelineCreationCacheControl_; return *this; } - operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( 
PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ); } - bool operator!=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT; + void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; }; - static_assert( sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) == sizeof( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) == + sizeof( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR { - VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT : pipelineExecutableInfo( pipelineExecutableInfo_ ) {} - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) - offsetof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) - + offsetof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, pNext ) ); return *this; } - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( + VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT { pipelineExecutableInfo = pipelineExecutableInfo_; return *this; } - operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default; #else - bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipelineExecutableInfo == 
rhs.pipelineExecutableInfo ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pipelineExecutableInfo == rhs.pipelineExecutableInfo ); } - bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {}; }; - static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == + sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDevicePointClippingProperties { - VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT : pointClippingBehavior( pointClippingBehavior_ ) {} - PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePointClippingProperties & + operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePointClippingProperties ) - offsetof( PhysicalDevicePointClippingProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDevicePointClippingProperties ) - + offsetof( PhysicalDevicePointClippingProperties, pNext ) ); return *this; } @@ -51169,53 +57615,56 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDevicePointClippingProperties& operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePointClippingProperties & + operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDevicePointClippingProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDevicePointClippingProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default; #else - bool operator==( 
PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pointClippingBehavior == rhs.pointClippingBehavior ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pointClippingBehavior == rhs.pointClippingBehavior ); } - bool operator!=( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; }; - static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceSparseProperties { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ ) , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ ) , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ ) @@ -51228,61 +57677,64 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceSparseProperties& operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceSparseProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); 
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceSparseProperties const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
-          && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
-          && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
-          && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
-          && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) &&
+             ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) &&
+             ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) &&
+             ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) &&
+             ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
     }

-    bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape            = {};
     VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
-    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
-    VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
-    VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape            = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize                  = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict               = {};
   };
-  static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value,
+                 "struct wrapper is not a standard layout!"
); struct PhysicalDeviceProperties { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {}, - uint32_t driverVersion_ = {}, - uint32_t vendorID_ = {}, - uint32_t deviceID_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, - std::array const& deviceName_ = {}, - std::array const& pipelineCacheUUID_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( + uint32_t apiVersion_ = {}, + uint32_t driverVersion_ = {}, + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, + std::array const & deviceName_ = {}, + std::array const & pipelineCacheUUID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT : apiVersion( apiVersion_ ) , driverVersion( driverVersion_ ) , vendorID( vendorID_ ) @@ -51299,61 +57751,58 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceProperties& operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProperties const & ) const = default; #else - bool operator==( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( apiVersion == rhs.apiVersion ) - && ( driverVersion == rhs.driverVersion ) - && ( vendorID == rhs.vendorID ) - && ( deviceID == rhs.deviceID ) - && ( deviceType == rhs.deviceType ) - && ( deviceName == rhs.deviceName ) - && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) - && ( limits == rhs.limits ) - && ( sparseProperties == rhs.sparseProperties ); + return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && + ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && ( deviceType == rhs.deviceType ) && + ( deviceName == rhs.deviceName ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && + ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties ); } - bool operator!=( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t apiVersion = {}; - uint32_t driverVersion = {}; - uint32_t vendorID = {}; - uint32_t deviceID = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID 
= {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {}; + uint32_t apiVersion = {}; + uint32_t driverVersion = {}; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {}; }; - static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" ); + static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PhysicalDeviceProperties2 { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT : properties( properties_ ) {} @@ -51368,55 +57817,60 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceProperties2& operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceProperties2 const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceProperties2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProperties2 const & ) const = default; #else - bool operator==( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( properties == rhs.properties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); } - bool operator!=( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2; + void * pNext = {}; VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {}; }; - static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceProtectedMemoryFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT : protectedMemory( protectedMemory_ ) {} - PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceProtectedMemoryFeatures & + operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceProtectedMemoryFeatures ) - offsetof( PhysicalDeviceProtectedMemoryFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceProtectedMemoryFeatures ) - + offsetof( PhysicalDeviceProtectedMemoryFeatures, pNext ) ); return *this; } @@ -51425,67 +57879,74 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceProtectedMemoryFeatures & + operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceProtectedMemoryFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceProtectedMemoryFeatures & + setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT { protectedMemory = protectedMemory_; return *this; } - operator VkPhysicalDeviceProtectedMemoryFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( protectedMemory == rhs.protectedMemory ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory ); } - bool operator!=( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; - void* pNext = {}; - 
VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; }; - static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceProtectedMemoryProperties { - VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT : protectedNoFault( protectedNoFault_ ) {} - PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceProtectedMemoryProperties & + operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceProtectedMemoryProperties ) - offsetof( PhysicalDeviceProtectedMemoryProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceProtectedMemoryProperties ) - + offsetof( PhysicalDeviceProtectedMemoryProperties, pNext ) ); return *this; } @@ -51494,33 +57955,32 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceProtectedMemoryProperties & + operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceProtectedMemoryProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceProtectedMemoryProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default; #else - bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( protectedNoFault == rhs.protectedNoFault ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault ); } - bool operator!=( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -51528,56 +57988,64 @@ namespace VULKAN_HPP_NAMESPACE public: const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; }; - static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == + sizeof( VkPhysicalDeviceProtectedMemoryProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDevicePushDescriptorPropertiesKHR { - VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT : maxPushDescriptors( maxPushDescriptors_ ) {} - PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePushDescriptorPropertiesKHR & + operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) - offsetof( PhysicalDevicePushDescriptorPropertiesKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) - + offsetof( PhysicalDevicePushDescriptorPropertiesKHR, pNext ) ); return *this; } - PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePushDescriptorPropertiesKHR & + operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDevicePushDescriptorPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const & ) const = default; #else - bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxPushDescriptors == rhs.maxPushDescriptors ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPushDescriptors == rhs.maxPushDescriptors ); } - bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -51585,24 +58053,28 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; - void* pNext = {}; - uint32_t maxPushDescriptors = {}; + void * pNext = {}; + uint32_t maxPushDescriptors = {}; }; - static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == + sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #ifdef VK_ENABLE_BETA_EXTENSIONS struct PhysicalDeviceRayTracingFeaturesKHR { - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 rayTracing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ = {} ) VULKAN_HPP_NOEXCEPT : rayTracing( rayTracing_ ) , rayTracingShaderGroupHandleCaptureReplay( rayTracingShaderGroupHandleCaptureReplay_ ) , rayTracingShaderGroupHandleCaptureReplayMixed( rayTracingShaderGroupHandleCaptureReplayMixed_ ) @@ -51614,9 +58086,12 @@ namespace VULKAN_HPP_NAMESPACE , rayTracingPrimitiveCulling( rayTracingPrimitiveCulling_ ) {} - PhysicalDeviceRayTracingFeaturesKHR & operator=( PhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingFeaturesKHR & + operator=( PhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRayTracingFeaturesKHR ) - offsetof( PhysicalDeviceRayTracingFeaturesKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceRayTracingFeaturesKHR ) - offsetof( PhysicalDeviceRayTracingFeaturesKHR, pNext ) ); return *this; } @@ -51625,13 +58100,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceRayTracingFeaturesKHR& operator=( VkPhysicalDeviceRayTracingFeaturesKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingFeaturesKHR & + operator=( VkPhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceRayTracingFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -51643,37 +58119,43 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT { rayTracingShaderGroupHandleCaptureReplay = rayTracingShaderGroupHandleCaptureReplay_; return *this; } - PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplayMixed( VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplayMixed( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT { rayTracingShaderGroupHandleCaptureReplayMixed = rayTracingShaderGroupHandleCaptureReplayMixed_; return *this; } - PhysicalDeviceRayTracingFeaturesKHR & setRayTracingAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingAccelerationStructureCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT { rayTracingAccelerationStructureCaptureReplay = rayTracingAccelerationStructureCaptureReplay_; return *this; } - PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectTraceRays( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingFeaturesKHR & + setRayTracingIndirectTraceRays( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ ) VULKAN_HPP_NOEXCEPT { rayTracingIndirectTraceRays = rayTracingIndirectTraceRays_; return *this; } - PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectAccelerationStructureBuild( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectAccelerationStructureBuild( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ ) VULKAN_HPP_NOEXCEPT { rayTracingIndirectAccelerationStructureBuild = rayTracingIndirectAccelerationStructureBuild_; return *this; } - PhysicalDeviceRayTracingFeaturesKHR & setRayTracingHostAccelerationStructureCommands( VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingHostAccelerationStructureCommands( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ ) VULKAN_HPP_NOEXCEPT { rayTracingHostAccelerationStructureCommands = rayTracingHostAccelerationStructureCommands_; return *this; @@ -51685,75 +58167,76 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceRayTracingFeaturesKHR & setRayTracingPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT + 
PhysicalDeviceRayTracingFeaturesKHR & + setRayTracingPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT { rayTracingPrimitiveCulling = rayTracingPrimitiveCulling_; return *this; } - operator VkPhysicalDeviceRayTracingFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceRayTracingFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingFeaturesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceRayTracingFeaturesKHR const& ) const = default; -#else - bool operator==( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingFeaturesKHR const & ) const = default; +# else + bool operator==( PhysicalDeviceRayTracingFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( rayTracing == rhs.rayTracing ) - && ( rayTracingShaderGroupHandleCaptureReplay == rhs.rayTracingShaderGroupHandleCaptureReplay ) - && ( rayTracingShaderGroupHandleCaptureReplayMixed == rhs.rayTracingShaderGroupHandleCaptureReplayMixed ) - && ( rayTracingAccelerationStructureCaptureReplay == rhs.rayTracingAccelerationStructureCaptureReplay ) - && ( rayTracingIndirectTraceRays == rhs.rayTracingIndirectTraceRays ) - && ( rayTracingIndirectAccelerationStructureBuild == rhs.rayTracingIndirectAccelerationStructureBuild ) - && ( rayTracingHostAccelerationStructureCommands == rhs.rayTracingHostAccelerationStructureCommands ) - && ( rayQuery == rhs.rayQuery ) - && ( rayTracingPrimitiveCulling == rhs.rayTracingPrimitiveCulling ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracing == rhs.rayTracing ) && + ( rayTracingShaderGroupHandleCaptureReplay == rhs.rayTracingShaderGroupHandleCaptureReplay ) && + ( rayTracingShaderGroupHandleCaptureReplayMixed == rhs.rayTracingShaderGroupHandleCaptureReplayMixed ) && + ( rayTracingAccelerationStructureCaptureReplay == rhs.rayTracingAccelerationStructureCaptureReplay ) && + ( rayTracingIndirectTraceRays == rhs.rayTracingIndirectTraceRays ) && + ( rayTracingIndirectAccelerationStructureBuild == rhs.rayTracingIndirectAccelerationStructureBuild ) && + ( rayTracingHostAccelerationStructureCommands == rhs.rayTracingHostAccelerationStructureCommands ) && + ( rayQuery == rhs.rayQuery ) && ( rayTracingPrimitiveCulling == rhs.rayTracingPrimitiveCulling ); } - bool operator!=( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceRayTracingFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingFeaturesKHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracing = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild = {}; - VULKAN_HPP_NAMESPACE::Bool32 
rayTracingHostAccelerationStructureCommands = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracing = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling = {}; }; - static_assert( sizeof( PhysicalDeviceRayTracingFeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingFeaturesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceRayTracingFeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS struct PhysicalDeviceRayTracingPropertiesKHR { - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesKHR( uint32_t shaderGroupHandleSize_ = {}, - uint32_t maxRecursionDepth_ = {}, - uint32_t maxShaderGroupStride_ = {}, - uint32_t shaderGroupBaseAlignment_ = {}, - uint64_t maxGeometryCount_ = {}, - uint64_t maxInstanceCount_ = {}, - uint64_t maxPrimitiveCount_ = {}, - uint32_t maxDescriptorSetAccelerationStructures_ = {}, - uint32_t shaderGroupHandleCaptureReplaySize_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingPropertiesKHR( uint32_t shaderGroupHandleSize_ = {}, + uint32_t maxRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxPrimitiveCount_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {}, + uint32_t shaderGroupHandleCaptureReplaySize_ = {} ) VULKAN_HPP_NOEXCEPT : shaderGroupHandleSize( shaderGroupHandleSize_ ) , maxRecursionDepth( maxRecursionDepth_ ) , maxShaderGroupStride( maxShaderGroupStride_ ) @@ -51765,9 +58248,13 @@ namespace VULKAN_HPP_NAMESPACE , shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ ) {} - PhysicalDeviceRayTracingPropertiesKHR & operator=( PhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingPropertiesKHR & + operator=( PhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRayTracingPropertiesKHR ) - offsetof( PhysicalDeviceRayTracingPropertiesKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceRayTracingPropertiesKHR ) - + offsetof( PhysicalDeviceRayTracingPropertiesKHR, pNext ) ); return *this; } @@ -51776,73 +58263,74 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceRayTracingPropertiesKHR& operator=( VkPhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingPropertiesKHR & + operator=( 
VkPhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceRayTracingPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceRayTracingPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceRayTracingPropertiesKHR const& ) const = default; -#else - bool operator==( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPropertiesKHR const & ) const = default; +# else + bool operator==( PhysicalDeviceRayTracingPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) - && ( maxRecursionDepth == rhs.maxRecursionDepth ) - && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) - && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) - && ( maxGeometryCount == rhs.maxGeometryCount ) - && ( maxInstanceCount == rhs.maxInstanceCount ) - && ( maxPrimitiveCount == rhs.maxPrimitiveCount ) - && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) - && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && + ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && + ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && + ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) && + ( maxPrimitiveCount == rhs.maxPrimitiveCount ) && + ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) && + ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ); } - bool operator!=( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceRayTracingPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesKHR; - void* pNext = {}; - uint32_t shaderGroupHandleSize = {}; - uint32_t maxRecursionDepth = {}; - uint32_t maxShaderGroupStride = {}; - uint32_t shaderGroupBaseAlignment = {}; - uint64_t maxGeometryCount = {}; - uint64_t maxInstanceCount = {}; - uint64_t maxPrimitiveCount = {}; - uint32_t maxDescriptorSetAccelerationStructures = {}; - uint32_t shaderGroupHandleCaptureReplaySize = {}; + void * pNext = {}; + uint32_t shaderGroupHandleSize = {}; + uint32_t maxRecursionDepth = {}; + uint32_t maxShaderGroupStride = {}; + uint32_t shaderGroupBaseAlignment = {}; + uint64_t maxGeometryCount = {}; + uint64_t maxInstanceCount = {}; + uint64_t maxPrimitiveCount = {}; + uint32_t maxDescriptorSetAccelerationStructures = {}; + uint32_t shaderGroupHandleCaptureReplaySize = {}; }; - static_assert( sizeof( PhysicalDeviceRayTracingPropertiesKHR ) == sizeof( VkPhysicalDeviceRayTracingPropertiesKHR ), "struct and 
wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceRayTracingPropertiesKHR ) == sizeof( VkPhysicalDeviceRayTracingPropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceRayTracingPropertiesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {}, - uint32_t maxRecursionDepth_ = {}, - uint32_t maxShaderGroupStride_ = {}, - uint32_t shaderGroupBaseAlignment_ = {}, - uint64_t maxGeometryCount_ = {}, - uint64_t maxInstanceCount_ = {}, - uint64_t maxTriangleCount_ = {}, - uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {}, + uint32_t maxRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxTriangleCount_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT : shaderGroupHandleSize( shaderGroupHandleSize_ ) , maxRecursionDepth( maxRecursionDepth_ ) , maxShaderGroupStride( maxShaderGroupStride_ ) @@ -51853,9 +58341,13 @@ namespace VULKAN_HPP_NAMESPACE , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) {} - PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingPropertiesNV & + operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRayTracingPropertiesNV ) - offsetof( PhysicalDeviceRayTracingPropertiesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceRayTracingPropertiesNV ) - + offsetof( PhysicalDeviceRayTracingPropertiesNV, pNext ) ); return *this; } @@ -51864,40 +58356,38 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceRayTracingPropertiesNV& operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingPropertiesNV & + operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceRayTracingPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default; #else - bool operator==( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) - && ( maxRecursionDepth == rhs.maxRecursionDepth ) - 
&& ( maxShaderGroupStride == rhs.maxShaderGroupStride ) - && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) - && ( maxGeometryCount == rhs.maxGeometryCount ) - && ( maxInstanceCount == rhs.maxInstanceCount ) - && ( maxTriangleCount == rhs.maxTriangleCount ) - && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && + ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && + ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && + ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) && + ( maxTriangleCount == rhs.maxTriangleCount ) && + ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ); } - bool operator!=( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -51905,95 +58395,110 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; - void* pNext = {}; - uint32_t shaderGroupHandleSize = {}; - uint32_t maxRecursionDepth = {}; - uint32_t maxShaderGroupStride = {}; - uint32_t shaderGroupBaseAlignment = {}; - uint64_t maxGeometryCount = {}; - uint64_t maxInstanceCount = {}; - uint64_t maxTriangleCount = {}; - uint32_t maxDescriptorSetAccelerationStructures = {}; + void * pNext = {}; + uint32_t shaderGroupHandleSize = {}; + uint32_t maxRecursionDepth = {}; + uint32_t maxShaderGroupStride = {}; + uint32_t shaderGroupBaseAlignment = {}; + uint64_t maxGeometryCount = {}; + uint64_t maxInstanceCount = {}; + uint64_t maxTriangleCount = {}; + uint32_t maxDescriptorSetAccelerationStructures = {}; }; - static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT : representativeFragmentTest( representativeFragmentTest_ ) {} - PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) - offsetof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) - + offsetof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV, pNext ) ); return *this; } - PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRepresentativeFragmentTestFeaturesNV( + VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT { representativeFragmentTest = representativeFragmentTest_; return *this; } - operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( representativeFragmentTest == rhs.representativeFragmentTest ); + return ( sType 
== rhs.sType ) && ( pNext == rhs.pNext ) && + ( representativeFragmentTest == rhs.representativeFragmentTest ); } - bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {}; }; - static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == + sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceSampleLocationsPropertiesEXT { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, - std::array const& sampleLocationCoordinateRange_ = {}, - uint32_t sampleLocationSubPixelBits_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, + std::array const & sampleLocationCoordinateRange_ = {}, + uint32_t sampleLocationSubPixelBits_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT : sampleLocationSampleCounts( sampleLocationSampleCounts_ ) , maxSampleLocationGridSize( maxSampleLocationGridSize_ ) , sampleLocationCoordinateRange( sampleLocationCoordinateRange_ ) @@ -52001,112 +58506,126 @@ namespace VULKAN_HPP_NAMESPACE , variableSampleLocations( variableSampleLocations_ ) {} - PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSampleLocationsPropertiesEXT & + operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) - offsetof( PhysicalDeviceSampleLocationsPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) - + offsetof( PhysicalDeviceSampleLocationsPropertiesEXT, pNext ) ); return *this; } - PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceSampleLocationsPropertiesEXT& operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSampleLocationsPropertiesEXT & + operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT 
const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceSampleLocationsPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) - && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) - && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) - && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) - && ( variableSampleLocations == rhs.variableSampleLocations ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) && + ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) && + ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) && + ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) && + ( variableSampleLocations == rhs.variableSampleLocations ); } - bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D sampleLocationCoordinateRange = {}; - uint32_t sampleLocationSubPixelBits = {}; - VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {}; + uint32_t sampleLocationSubPixelBits = {}; + VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {}; }; - static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == + sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceSamplerFilterMinmaxProperties { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {} ) VULKAN_HPP_NOEXCEPT : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) {} - PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSamplerFilterMinmaxProperties & + operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) - offsetof( PhysicalDeviceSamplerFilterMinmaxProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) - + offsetof( PhysicalDeviceSamplerFilterMinmaxProperties, pNext ) ); return *this; } - PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceSamplerFilterMinmaxProperties& operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSamplerFilterMinmaxProperties & + operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceSamplerFilterMinmaxProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default; #else - bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) - && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && + ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ); } - bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -52114,69 +58633,79 @@ 
namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; }; - static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) == + sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceSamplerYcbcrConversionFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT : samplerYcbcrConversion( samplerYcbcrConversion_ ) {} - PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSamplerYcbcrConversionFeatures & + operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) - offsetof( PhysicalDeviceSamplerYcbcrConversionFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) - + offsetof( PhysicalDeviceSamplerYcbcrConversionFeatures, pNext ) ); return *this; } - PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSamplerYcbcrConversionFeatures & + operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSamplerYcbcrConversionFeatures & + setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT { samplerYcbcrConversion = samplerYcbcrConversion_; return *this; } - operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); 
} operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ); } - bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -52184,21 +58713,28 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; }; - static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == + sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceScalarBlockLayoutFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} ) VULKAN_HPP_NOEXCEPT - : scalarBlockLayout( scalarBlockLayout_ ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} ) + VULKAN_HPP_NOEXCEPT : scalarBlockLayout( scalarBlockLayout_ ) {} - PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceScalarBlockLayoutFeatures & + operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) - offsetof( PhysicalDeviceScalarBlockLayoutFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) - + offsetof( PhysicalDeviceScalarBlockLayoutFeatures, pNext ) ); return *this; } @@ -52207,45 +58743,45 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceScalarBlockLayoutFeatures& operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceScalarBlockLayoutFeatures & + operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceScalarBlockLayoutFeatures & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceScalarBlockLayoutFeatures & + setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT { scalarBlockLayout = scalarBlockLayout_; return *this; } - operator VkPhysicalDeviceScalarBlockLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( scalarBlockLayout == rhs.scalarBlockLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalarBlockLayout == rhs.scalarBlockLayout ); } - bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -52253,68 +58789,79 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + void * 
pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; }; - static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) == + sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {} ) VULKAN_HPP_NOEXCEPT : separateDepthStencilLayouts( separateDepthStencilLayouts_ ) {} - PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) - offsetof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) - + offsetof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures, pNext ) ); return *this; } - PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceSeparateDepthStencilLayoutsFeatures& operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT { separateDepthStencilLayouts = separateDepthStencilLayouts_; return *this; } - operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( 
PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ); } - bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -52322,23 +58869,31 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; }; - static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == + sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceShaderAtomicInt64Features { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) {} - PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderAtomicInt64Features & + operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderAtomicInt64Features ) - offsetof( PhysicalDeviceShaderAtomicInt64Features, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceShaderAtomicInt64Features ) - + offsetof( PhysicalDeviceShaderAtomicInt64Features, pNext ) ); return *this; } @@ -52347,52 +58902,54 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceShaderAtomicInt64Features& operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderAtomicInt64Features & + operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceShaderAtomicInt64Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceShaderAtomicInt64Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderAtomicInt64Features & + setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT { shaderBufferInt64Atomics = shaderBufferInt64Atomics_; return *this; } - PhysicalDeviceShaderAtomicInt64Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderAtomicInt64Features & + setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT { shaderSharedInt64Atomics = shaderSharedInt64Atomics_; return *this; } - operator VkPhysicalDeviceShaderAtomicInt64Features const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default; #else - bool operator==( PhysicalDeviceShaderAtomicInt64Features const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) - 
&& ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && + ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ); } - bool operator!=( PhysicalDeviceShaderAtomicInt64Features const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -52400,24 +58957,32 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; }; - static_assert( sizeof( PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceShaderAtomicInt64Features ) == + sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceShaderClockFeaturesKHR { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {} ) VULKAN_HPP_NOEXCEPT : shaderSubgroupClock( shaderSubgroupClock_ ) , shaderDeviceClock( shaderDeviceClock_ ) {} - PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderClockFeaturesKHR & + operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderClockFeaturesKHR ) - offsetof( PhysicalDeviceShaderClockFeaturesKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceShaderClockFeaturesKHR ) - + offsetof( PhysicalDeviceShaderClockFeaturesKHR, pNext ) ); return *this; } @@ -52426,52 +58991,53 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceShaderClockFeaturesKHR& operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderClockFeaturesKHR & + operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceShaderClockFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderClockFeaturesKHR & + setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT { shaderSubgroupClock = shaderSubgroupClock_; return *this; } - PhysicalDeviceShaderClockFeaturesKHR 
& setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderClockFeaturesKHR & + setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT { shaderDeviceClock = shaderDeviceClock_; return *this; } - operator VkPhysicalDeviceShaderClockFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default; #else - bool operator==( PhysicalDeviceShaderClockFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) - && ( shaderDeviceClock == rhs.shaderDeviceClock ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) && + ( shaderDeviceClock == rhs.shaderDeviceClock ); } - bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -52479,24 +59045,31 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {}; }; - static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceShaderCoreProperties2AMD { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, - uint32_t activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( + VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, + uint32_t activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT : shaderCoreFeatures( shaderCoreFeatures_ ) , activeComputeUnitCount( activeComputeUnitCount_ ) {} - PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderCoreProperties2AMD & + operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderCoreProperties2AMD ) - offsetof( PhysicalDeviceShaderCoreProperties2AMD, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceShaderCoreProperties2AMD ) - + offsetof( PhysicalDeviceShaderCoreProperties2AMD, pNext ) ); return *this; } @@ -52505,64 +59078,66 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceShaderCoreProperties2AMD& operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderCoreProperties2AMD & + operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceShaderCoreProperties2AMD const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default; #else - bool operator==( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderCoreFeatures == rhs.shaderCoreFeatures ) - && ( activeComputeUnitCount == rhs.activeComputeUnitCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreFeatures == rhs.shaderCoreFeatures ) && + ( activeComputeUnitCount == rhs.activeComputeUnitCount ); } - bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {}; - uint32_t activeComputeUnitCount = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {}; + uint32_t activeComputeUnitCount = {}; }; - static_assert( sizeof( 
PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceShaderCorePropertiesAMD { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {}, - uint32_t shaderArraysPerEngineCount_ = {}, - uint32_t computeUnitsPerShaderArray_ = {}, - uint32_t simdPerComputeUnit_ = {}, - uint32_t wavefrontsPerSimd_ = {}, - uint32_t wavefrontSize_ = {}, - uint32_t sgprsPerSimd_ = {}, - uint32_t minSgprAllocation_ = {}, - uint32_t maxSgprAllocation_ = {}, - uint32_t sgprAllocationGranularity_ = {}, - uint32_t vgprsPerSimd_ = {}, - uint32_t minVgprAllocation_ = {}, - uint32_t maxVgprAllocation_ = {}, - uint32_t vgprAllocationGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {}, + uint32_t shaderArraysPerEngineCount_ = {}, + uint32_t computeUnitsPerShaderArray_ = {}, + uint32_t simdPerComputeUnit_ = {}, + uint32_t wavefrontsPerSimd_ = {}, + uint32_t wavefrontSize_ = {}, + uint32_t sgprsPerSimd_ = {}, + uint32_t minSgprAllocation_ = {}, + uint32_t maxSgprAllocation_ = {}, + uint32_t sgprAllocationGranularity_ = {}, + uint32_t vgprsPerSimd_ = {}, + uint32_t minVgprAllocation_ = {}, + uint32_t maxVgprAllocation_ = {}, + uint32_t vgprAllocationGranularity_ = {} ) VULKAN_HPP_NOEXCEPT : shaderEngineCount( shaderEngineCount_ ) , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ) , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ) @@ -52579,9 +59154,13 @@ namespace VULKAN_HPP_NAMESPACE , vgprAllocationGranularity( vgprAllocationGranularity_ ) {} - PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderCorePropertiesAMD & + operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderCorePropertiesAMD ) - offsetof( PhysicalDeviceShaderCorePropertiesAMD, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceShaderCorePropertiesAMD ) - + offsetof( PhysicalDeviceShaderCorePropertiesAMD, pNext ) ); return *this; } @@ -52590,197 +59169,211 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceShaderCorePropertiesAMD& operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderCorePropertiesAMD & + operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceShaderCorePropertiesAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const& ) const = default; +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default; #else - bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderEngineCount == rhs.shaderEngineCount ) - && ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) - && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) - && ( simdPerComputeUnit == rhs.simdPerComputeUnit ) - && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) - && ( wavefrontSize == rhs.wavefrontSize ) - && ( sgprsPerSimd == rhs.sgprsPerSimd ) - && ( minSgprAllocation == rhs.minSgprAllocation ) - && ( maxSgprAllocation == rhs.maxSgprAllocation ) - && ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) - && ( vgprsPerSimd == rhs.vgprsPerSimd ) - && ( minVgprAllocation == rhs.minVgprAllocation ) - && ( maxVgprAllocation == rhs.maxVgprAllocation ) - && ( vgprAllocationGranularity == rhs.vgprAllocationGranularity ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEngineCount == rhs.shaderEngineCount ) && + ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) && + ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) && + ( simdPerComputeUnit == rhs.simdPerComputeUnit ) && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) && + ( wavefrontSize == rhs.wavefrontSize ) && ( sgprsPerSimd == rhs.sgprsPerSimd ) && + ( minSgprAllocation == rhs.minSgprAllocation ) && ( maxSgprAllocation == rhs.maxSgprAllocation ) && + ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) && ( vgprsPerSimd == rhs.vgprsPerSimd ) && + ( minVgprAllocation == rhs.minVgprAllocation ) && ( maxVgprAllocation == rhs.maxVgprAllocation ) && + ( vgprAllocationGranularity == rhs.vgprAllocationGranularity ); } - bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; - void* pNext = {}; - uint32_t shaderEngineCount = {}; - uint32_t shaderArraysPerEngineCount = {}; - uint32_t computeUnitsPerShaderArray = {}; - uint32_t simdPerComputeUnit = {}; - uint32_t wavefrontsPerSimd = {}; - uint32_t wavefrontSize = {}; - uint32_t sgprsPerSimd = {}; - uint32_t minSgprAllocation = {}; - uint32_t maxSgprAllocation = {}; - uint32_t sgprAllocationGranularity = {}; - uint32_t vgprsPerSimd = {}; - uint32_t minVgprAllocation = {}; - uint32_t maxVgprAllocation = {}; - uint32_t vgprAllocationGranularity = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + void * pNext = {}; + uint32_t shaderEngineCount = {}; + uint32_t shaderArraysPerEngineCount = {}; + uint32_t computeUnitsPerShaderArray = {}; + uint32_t simdPerComputeUnit = {}; + uint32_t wavefrontsPerSimd = {}; + uint32_t wavefrontSize = {}; + uint32_t sgprsPerSimd = {}; + uint32_t minSgprAllocation = {}; + uint32_t maxSgprAllocation = {}; + uint32_t sgprAllocationGranularity = {}; + uint32_t vgprsPerSimd = {}; + uint32_t minVgprAllocation = {}; + uint32_t maxVgprAllocation = {}; + uint32_t vgprAllocationGranularity = {}; }; - static_assert( sizeof( 
PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) {} - PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & + operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) - offsetof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) - + offsetof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( + VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & + operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( + &rhs ); return *this; } - PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation( + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT { shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; return *this; } - operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ); } - bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; }; - static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
 
   struct PhysicalDeviceShaderDrawParametersFeatures
   {
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
       : shaderDrawParameters( shaderDrawParameters_ )
     {}
 
-    PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceShaderDrawParametersFeatures &
+      operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderDrawParametersFeatures ) - offsetof( PhysicalDeviceShaderDrawParametersFeatures, pNext ) );
+      memcpy( &pNext,
+              &rhs.pNext,
+              sizeof( PhysicalDeviceShaderDrawParametersFeatures ) -
+                offsetof( PhysicalDeviceShaderDrawParametersFeatures, pNext ) );
      return *this;
    }
 
-    PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs )
+      VULKAN_HPP_NOEXCEPT
     {
      *this = rhs;
    }
 
-    PhysicalDeviceShaderDrawParametersFeatures& operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceShaderDrawParametersFeatures &
+      operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs );
      return *this;
    }
 
-    PhysicalDeviceShaderDrawParametersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
    }
 
-    PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceShaderDrawParametersFeatures &
+      setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
     {
       shaderDrawParameters = shaderDrawParameters_;
       return *this;
    }
 
-    operator VkPhysicalDeviceShaderDrawParametersFeatures const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>( this );
    }
 
     operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures *>( this );
    }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderDrawParameters == rhs.shaderDrawParameters );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDrawParameters == rhs.shaderDrawParameters );
    }
 
-    bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
    }
@@ -52788,23 +59381,31 @@ namespace VULKAN_HPP_NAMESPACE
   public:
     const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
+    void *                       pNext                = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
  };
-  static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceShaderDrawParametersFeatures>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) ==
+                   sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderDrawParametersFeatures>::value,
+                 "struct wrapper is not a standard layout!" );
 
   struct PhysicalDeviceShaderFloat16Int8Features
   {
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
-                                                                  VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
+                                               VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {} ) VULKAN_HPP_NOEXCEPT
       : shaderFloat16( shaderFloat16_ )
      , shaderInt8( shaderInt8_ )
     {}
 
-    PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceShaderFloat16Int8Features &
+      operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderFloat16Int8Features ) - offsetof( PhysicalDeviceShaderFloat16Int8Features, pNext ) );
+      memcpy( &pNext,
+              &rhs.pNext,
+              sizeof( PhysicalDeviceShaderFloat16Int8Features ) -
+                offsetof( PhysicalDeviceShaderFloat16Int8Features, pNext ) );
      return *this;
    }
@@ -52813,122 +59414,131 @@ namespace VULKAN_HPP_NAMESPACE
      *this = rhs;
    }
 
-    PhysicalDeviceShaderFloat16Int8Features& operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceShaderFloat16Int8Features &
+      operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>( &rhs );
      return *this;
    }
 
-    PhysicalDeviceShaderFloat16Int8Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
    }
 
-    PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceShaderFloat16Int8Features &
+      setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
     {
       shaderFloat16 = shaderFloat16_;
       return *this;
    }
 
-    PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceShaderFloat16Int8Features &
+      setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
     {
       shaderInt8 = shaderInt8_;
       return *this;
    }
 
-    operator VkPhysicalDeviceShaderFloat16Int8Features const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features *>( this );
    }
 
     operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features *>( this );
    }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceShaderFloat16Int8Features const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderFloat16 == rhs.shaderFloat16 )
-          && ( shaderInt8 == rhs.shaderInt8 );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) &&
+             ( shaderInt8 == rhs.shaderInt8 );
    }
 
-    bool operator!=( PhysicalDeviceShaderFloat16Int8Features const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
    }
 #endif
 
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
+    const VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
+    void *                                    pNext         = {};
+    VULKAN_HPP_NAMESPACE::Bool32              shaderFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32              shaderInt8    = {};
  };
-  static_assert( sizeof( PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceShaderFloat16Int8Features>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceShaderFloat16Int8Features ) ==
+                   sizeof( VkPhysicalDeviceShaderFloat16Int8Features ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderFloat16Int8Features>::value,
+                 "struct wrapper is not a standard layout!"
); struct PhysicalDeviceShaderImageFootprintFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT - : imageFootprint( imageFootprint_ ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT : imageFootprint( imageFootprint_ ) {} - PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderImageFootprintFeaturesNV & + operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) - offsetof( PhysicalDeviceShaderImageFootprintFeaturesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) - + offsetof( PhysicalDeviceShaderImageFootprintFeaturesNV, pNext ) ); return *this; } - PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceShaderImageFootprintFeaturesNV& operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderImageFootprintFeaturesNV & + operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderImageFootprintFeaturesNV & + setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT { imageFootprint = imageFootprint_; return *this; } - operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( imageFootprint == rhs.imageFootprint ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFootprint == rhs.imageFootprint ); } - bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -52936,137 +59546,157 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {}; }; - static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == + sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT : shaderIntegerFunctions2( shaderIntegerFunctions2_ ) {} - PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) - offsetof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) - + offsetof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, pNext ) ); return *this; } - PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( + VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT { shaderIntegerFunctions2 = shaderIntegerFunctions2_; return *this; } - operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT { - 
return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default; #else - bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 ); } - bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {}; }; - static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == + sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceShaderSMBuiltinsFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderSMBuiltins( shaderSMBuiltins_ ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} ) + VULKAN_HPP_NOEXCEPT : shaderSMBuiltins( shaderSMBuiltins_ ) {} - PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSMBuiltinsFeaturesNV & + operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsFeaturesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) - + offsetof( PhysicalDeviceShaderSMBuiltinsFeaturesNV, pNext ) ); return *this; } - PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceShaderSMBuiltinsFeaturesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSMBuiltinsFeaturesNV & + operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSMBuiltinsFeaturesNV & + setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT { shaderSMBuiltins = shaderSMBuiltins_; return *this; } - operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderSMBuiltins == rhs.shaderSMBuiltins ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMBuiltins == rhs.shaderSMBuiltins ); } - bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -53074,59 +59704,67 @@ namespace VULKAN_HPP_NAMESPACE 
public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {}; }; - static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == + sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceShaderSMBuiltinsPropertiesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, - uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, + uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT : shaderSMCount( shaderSMCount_ ) , shaderWarpsPerSM( shaderWarpsPerSM_ ) {} - PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSMBuiltinsPropertiesNV & + operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsPropertiesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) - + offsetof( PhysicalDeviceShaderSMBuiltinsPropertiesNV, pNext ) ); return *this; } - PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceShaderSMBuiltinsPropertiesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSMBuiltinsPropertiesNV & + operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default; #else - bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderSMCount == rhs.shaderSMCount ) - && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM ); + return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMCount == rhs.shaderSMCount ) && + ( shaderWarpsPerSM == rhs.shaderWarpsPerSM ); } - bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -53134,69 +59772,80 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; - void* pNext = {}; - uint32_t shaderSMCount = {}; - uint32_t shaderWarpsPerSM = {}; + void * pNext = {}; + uint32_t shaderSMCount = {}; + uint32_t shaderWarpsPerSM = {}; }; - static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == + sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {} ) VULKAN_HPP_NOEXCEPT : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) {} - PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) - offsetof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) - + offsetof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures, pNext ) ); return *this; } - PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceShaderSubgroupExtendedTypesFeatures& operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + setShaderSubgroupExtendedTypes( 
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT { shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; return *this; } - operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ); } - bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -53204,77 +59853,87 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; }; - static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == + sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceShadingRateImageFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT : shadingRateImage( shadingRateImage_ ) , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ ) {} - PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImageFeaturesNV & + operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) - offsetof( PhysicalDeviceShadingRateImageFeaturesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) - + offsetof( PhysicalDeviceShadingRateImageFeaturesNV, pNext ) ); return *this; } - PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceShadingRateImageFeaturesNV& operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImageFeaturesNV & + operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImageFeaturesNV & + setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT { shadingRateImage = shadingRateImage_; return *this; } - PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImageFeaturesNV & + setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT { shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_; return *this; } - operator VkPhysicalDeviceShadingRateImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( 
PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shadingRateImage == rhs.shadingRateImage ) - && ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImage == rhs.shadingRateImage ) && + ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder ); } - bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -53282,63 +59941,71 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {}; - VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {}; }; - static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == + sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceShadingRateImagePropertiesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, - uint32_t shadingRatePaletteSize_ = {}, - uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, + uint32_t shadingRatePaletteSize_ = {}, + uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT : shadingRateTexelSize( shadingRateTexelSize_ ) , shadingRatePaletteSize( shadingRatePaletteSize_ ) , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ ) {} - PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImagePropertiesNV & + operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) - offsetof( PhysicalDeviceShadingRateImagePropertiesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) - + offsetof( PhysicalDeviceShadingRateImagePropertiesNV, pNext ) ); return *this; } - PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceShadingRateImagePropertiesNV& operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImagePropertiesNV & + operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = 
*reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceShadingRateImagePropertiesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default; #else - bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) - && ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) - && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) && + ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) && + ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples ); } - bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -53346,21 +60013,25 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {}; - uint32_t shadingRatePaletteSize = {}; - uint32_t shadingRateMaxCoarseSamples = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {}; + uint32_t shadingRatePaletteSize = {}; + uint32_t shadingRateMaxCoarseSamples = {}; }; - static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == + sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceSparseImageFormatInfo2 { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT : format( format_ ) , type( type_ ) , samples( samples_ ) @@ -53368,9 +60039,13 @@ namespace VULKAN_HPP_NAMESPACE , tiling( tiling_ ) {} - PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSparseImageFormatInfo2 & + operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) - offsetof( PhysicalDeviceSparseImageFormatInfo2, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceSparseImageFormatInfo2 ) - + offsetof( PhysicalDeviceSparseImageFormatInfo2, pNext ) ); return *this; } @@ -53379,13 +60054,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSparseImageFormatInfo2 & + operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -53403,7 +60079,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSparseImageFormatInfo2 & + setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT { samples = samples_; return *this; @@ -53421,54 +60098,52 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default; #else - bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool 
operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( type == rhs.type ) - && ( samples == rhs.samples ) - && ( usage == rhs.usage ) - && ( tiling == rhs.tiling ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && + ( samples == rhs.samples ) && ( usage == rhs.usage ) && ( tiling == rhs.tiling ); } - bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; }; - static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceSubgroupProperties { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, - VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( + uint32_t subgroupSize_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT : subgroupSize( subgroupSize_ ) , supportedStages( supportedStages_ ) , supportedOperations( supportedOperations_ ) @@ -53477,7 +60152,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSubgroupProperties ) - offsetof( PhysicalDeviceSubgroupProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceSubgroupProperties ) - offsetof( PhysicalDeviceSubgroupProperties, pNext ) ); return *this; } @@ -53486,117 +60163,123 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceSubgroupProperties& operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceSubgroupProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceSubgroupProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default; #else - bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( subgroupSize == rhs.subgroupSize ) - && ( supportedStages == rhs.supportedStages ) - && ( supportedOperations == rhs.supportedOperations ) - && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) && + ( supportedStages == rhs.supportedStages ) && ( supportedOperations == rhs.supportedOperations ) && + ( quadOperationsInAllStages == rhs.quadOperationsInAllStages ); } - bool operator!=( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties; - void* pNext = {}; - uint32_t subgroupSize = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties; + void 
* pNext = {}; + uint32_t subgroupSize = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {}; VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {}; - VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {}; }; - static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceSubgroupSizeControlFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT : subgroupSizeControl( subgroupSizeControl_ ) , computeFullSubgroups( computeFullSubgroups_ ) {} - PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlFeaturesEXT & + operator=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) - + offsetof( PhysicalDeviceSubgroupSizeControlFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceSubgroupSizeControlFeaturesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlFeaturesEXT & + operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceSubgroupSizeControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceSubgroupSizeControlFeaturesEXT & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlFeaturesEXT & + setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT { subgroupSizeControl = subgroupSizeControl_; return *this; } - PhysicalDeviceSubgroupSizeControlFeaturesEXT & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlFeaturesEXT & + setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT { computeFullSubgroups = computeFullSubgroups_; 
return *this; } - operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( subgroupSizeControl == rhs.subgroupSizeControl ) - && ( computeFullSubgroups == rhs.computeFullSubgroups ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSizeControl == rhs.subgroupSizeControl ) && + ( computeFullSubgroups == rhs.computeFullSubgroups ); } - bool operator!=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -53604,66 +60287,74 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; - VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; }; - static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == + sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceSubgroupSizeControlPropertiesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( uint32_t minSubgroupSize_ = {}, - uint32_t maxSubgroupSize_ = {}, - uint32_t maxComputeWorkgroupSubgroups_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( + uint32_t minSubgroupSize_ = {}, + uint32_t maxSubgroupSize_ = {}, + uint32_t maxComputeWorkgroupSubgroups_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {} ) VULKAN_HPP_NOEXCEPT : minSubgroupSize( minSubgroupSize_ ) , maxSubgroupSize( maxSubgroupSize_ ) , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ) , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ) {} - PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlPropertiesEXT & + operator=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) - + offsetof( PhysicalDeviceSubgroupSizeControlPropertiesEXT, pNext ) ); return *this; } - PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceSubgroupSizeControlPropertiesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlPropertiesEXT & + operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( minSubgroupSize == rhs.minSubgroupSize ) - && ( maxSubgroupSize == rhs.maxSubgroupSize ) - && ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) - && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && + ( maxSubgroupSize == rhs.maxSubgroupSize ) && + ( maxComputeWorkgroupSubgroups == 
rhs.maxComputeWorkgroupSubgroups ) && + ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ); } - bool operator!=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -53671,24 +60362,30 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT; - void* pNext = {}; - uint32_t minSubgroupSize = {}; - uint32_t maxSubgroupSize = {}; - uint32_t maxComputeWorkgroupSubgroups = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; + void * pNext = {}; + uint32_t minSubgroupSize = {}; + uint32_t maxSubgroupSize = {}; + uint32_t maxComputeWorkgroupSubgroups = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; }; - static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == + sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceSurfaceInfo2KHR { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT : surface( surface_ ) {} PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSurfaceInfo2KHR ) - offsetof( PhysicalDeviceSurfaceInfo2KHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceSurfaceInfo2KHR ) - offsetof( PhysicalDeviceSurfaceInfo2KHR, pNext ) ); return *this; } @@ -53697,13 +60394,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceSurfaceInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -53715,96 +60412,102 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPhysicalDeviceSurfaceInfo2KHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default; #else - bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( surface == rhs.surface ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surface == rhs.surface ); } - bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; }; - static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT : texelBufferAlignment( texelBufferAlignment_ ) {} - PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & + operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) - + offsetof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceTexelBufferAlignmentFeaturesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & + operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & + setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) 
VULKAN_HPP_NOEXCEPT { texelBufferAlignment = texelBufferAlignment_; return *this; } - operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( texelBufferAlignment == rhs.texelBufferAlignment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( texelBufferAlignment == rhs.texelBufferAlignment ); } - bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -53812,65 +60515,74 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {}; }; - static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == + sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT( + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ) , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ) , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ) , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ) {} - PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentPropertiesEXT & + operator=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) - + offsetof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT, pNext ) ); return *this; } - PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceTexelBufferAlignmentPropertiesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentPropertiesEXT & + operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext 
== rhs.pNext ) - && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) - && ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) - && ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) - && ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) && + ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) && + ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) && + ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ); } - bool operator!=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -53878,93 +60590,112 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; }; - static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) == + sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ) {} - PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & + operator=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) - offsetof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) - + offsetof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( + VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & + operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & + setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT { textureCompressionASTC_HDR = textureCompressionASTC_HDR_; return *this; } - operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ); + return ( sType 
== rhs.sType ) && ( pNext == rhs.pNext ) && + ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ); } - bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT; + void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {}; }; - static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) == + sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceTimelineSemaphoreFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {} ) VULKAN_HPP_NOEXCEPT - : timelineSemaphore( timelineSemaphore_ ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {} ) + VULKAN_HPP_NOEXCEPT : timelineSemaphore( timelineSemaphore_ ) {} - PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTimelineSemaphoreFeatures & + operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) - offsetof( PhysicalDeviceTimelineSemaphoreFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) - + offsetof( PhysicalDeviceTimelineSemaphoreFeatures, pNext ) ); return *this; } @@ -53973,45 +60704,45 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceTimelineSemaphoreFeatures& operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTimelineSemaphoreFeatures & + operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceTimelineSemaphoreFeatures & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTimelineSemaphoreFeatures & + setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT { timelineSemaphore = timelineSemaphore_; return *this; } - operator VkPhysicalDeviceTimelineSemaphoreFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator 
VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( timelineSemaphore == rhs.timelineSemaphore ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timelineSemaphore == rhs.timelineSemaphore ); } - bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -54019,56 +60750,64 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; }; - static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) == + sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceTimelineSemaphoreProperties { - VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} ) VULKAN_HPP_NOEXCEPT - : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} ) + VULKAN_HPP_NOEXCEPT : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) {} - PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTimelineSemaphoreProperties & + operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTimelineSemaphoreProperties ) - offsetof( PhysicalDeviceTimelineSemaphoreProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceTimelineSemaphoreProperties ) - + offsetof( PhysicalDeviceTimelineSemaphoreProperties, pNext ) ); return *this; } - PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceTimelineSemaphoreProperties& operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTimelineSemaphoreProperties & + operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceTimelineSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default; #else - bool operator==( PhysicalDeviceTimelineSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ); } - bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -54076,19 +60815,23 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; - void* pNext = {}; - uint64_t maxTimelineSemaphoreValueDifference = {}; + void * pNext = {}; + uint64_t maxTimelineSemaphoreValueDifference = {}; }; - static_assert( sizeof( PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( 
VkPhysicalDeviceTimelineSemaphoreProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceTimelineSemaphoreProperties ) == + sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceToolPropertiesEXT { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT( std::array const& name_ = {}, - std::array const& version_ = {}, - VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = {}, - std::array const& description_ = {}, - std::array const& layer_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT( + std::array const & name_ = {}, + std::array const & version_ = {}, + VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = {}, + std::array const & description_ = {}, + std::array const & layer_ = {} ) VULKAN_HPP_NOEXCEPT : name( name_ ) , version( version_ ) , purposes( purposes_ ) @@ -54098,7 +60841,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceToolPropertiesEXT & operator=( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceToolPropertiesEXT ) - offsetof( PhysicalDeviceToolPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceToolPropertiesEXT ) - offsetof( PhysicalDeviceToolPropertiesEXT, pNext ) ); return *this; } @@ -54107,37 +60852,32 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceToolPropertiesEXT& operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceToolPropertiesEXT & operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceToolPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceToolPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceToolPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceToolPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceToolPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( name == rhs.name ) - && ( version == rhs.version ) - && ( purposes == rhs.purposes ) - && ( description == rhs.description ) - && ( layer == rhs.layer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( version == rhs.version ) && + ( purposes == rhs.purposes ) && ( description == rhs.description ) && ( layer == rhs.layer ); } - bool operator!=( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceToolPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -54145,81 +60885,90 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceToolPropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D version = {}; - VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D layer = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D version = {}; + VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D layer = {}; }; - static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceTransformFeedbackFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {} ) VULKAN_HPP_NOEXCEPT : transformFeedback( transformFeedback_ ) , geometryStreams( geometryStreams_ ) {} - PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackFeaturesEXT & + operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) - offsetof( PhysicalDeviceTransformFeedbackFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) - + offsetof( PhysicalDeviceTransformFeedbackFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackFeaturesEXT & + operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackFeaturesEXT & + setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT { transformFeedback = transformFeedback_; return *this; } - PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 
geometryStreams_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackFeaturesEXT & + setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT { geometryStreams = geometryStreams_; return *this; } - operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( transformFeedback == rhs.transformFeedback ) - && ( geometryStreams == rhs.geometryStreams ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transformFeedback == rhs.transformFeedback ) && + ( geometryStreams == rhs.geometryStreams ); } - bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -54227,25 +60976,29 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; - VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; }; - static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == + sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceTransformFeedbackPropertiesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = {}, - uint32_t maxTransformFeedbackBuffers_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, - uint32_t maxTransformFeedbackStreamDataSize_ = {}, - uint32_t maxTransformFeedbackBufferDataSize_ = {}, - uint32_t maxTransformFeedbackBufferDataStride_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( + uint32_t maxTransformFeedbackStreams_ = {}, + uint32_t maxTransformFeedbackBuffers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, + uint32_t maxTransformFeedbackStreamDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataStride_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {} ) VULKAN_HPP_NOEXCEPT : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ) , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ) , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ) @@ -54258,53 +61011,58 @@ namespace VULKAN_HPP_NAMESPACE , transformFeedbackDraw( transformFeedbackDraw_ ) {} - PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackPropertiesEXT & + operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) - offsetof( PhysicalDeviceTransformFeedbackPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) - + offsetof( PhysicalDeviceTransformFeedbackPropertiesEXT, pNext ) ); return *this; } - PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceTransformFeedbackPropertiesEXT& operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackPropertiesEXT & + operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const& ) 
const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) - && ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) - && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) - && ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) - && ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) - && ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) - && ( transformFeedbackQueries == rhs.transformFeedbackQueries ) - && ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) - && ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) - && ( transformFeedbackDraw == rhs.transformFeedbackDraw ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) && + ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) && + ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) && + ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) && + ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) && + ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) && + ( transformFeedbackQueries == rhs.transformFeedbackQueries ) && + ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) && + ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) && + ( transformFeedbackDraw == rhs.transformFeedbackDraw ); } - bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -54312,77 +61070,88 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; - void* pNext = {}; - uint32_t maxTransformFeedbackStreams = {}; - uint32_t maxTransformFeedbackBuffers = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {}; - uint32_t maxTransformFeedbackStreamDataSize = {}; - uint32_t maxTransformFeedbackBufferDataSize = {}; - uint32_t maxTransformFeedbackBufferDataStride = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; + void * pNext = {}; + uint32_t maxTransformFeedbackStreams = {}; + uint32_t maxTransformFeedbackBuffers = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {}; + uint32_t maxTransformFeedbackStreamDataSize = {}; + uint32_t maxTransformFeedbackBufferDataSize = {}; + uint32_t maxTransformFeedbackBufferDataStride = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; }; - static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == + sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceUniformBufferStandardLayoutFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT : uniformBufferStandardLayout( uniformBufferStandardLayout_ ) {} - PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceUniformBufferStandardLayoutFeatures & + operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) - offsetof( PhysicalDeviceUniformBufferStandardLayoutFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) - + offsetof( PhysicalDeviceUniformBufferStandardLayoutFeatures, pNext ) ); return *this; } - PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceUniformBufferStandardLayoutFeatures& operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceUniformBufferStandardLayoutFeatures & + operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceUniformBufferStandardLayoutFeatures & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceUniformBufferStandardLayoutFeatures & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT { uniformBufferStandardLayout = uniformBufferStandardLayout_; return *this; } - operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ); } - bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -54390,23 +61159,31 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; }; - static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) == + sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceVariablePointersFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT : variablePointersStorageBuffer( variablePointersStorageBuffer_ ) , variablePointers( variablePointers_ ) {} - PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVariablePointersFeatures & + operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVariablePointersFeatures ) - offsetof( PhysicalDeviceVariablePointersFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceVariablePointersFeatures ) - + offsetof( PhysicalDeviceVariablePointersFeatures, pNext ) ); return *this; } @@ -54415,52 +61192,54 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceVariablePointersFeatures& operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVariablePointersFeatures & + operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceVariablePointersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT { variablePointersStorageBuffer = variablePointersStorageBuffer_; return *this; } - PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVariablePointersFeatures & + setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT { variablePointers = variablePointers_; return *this; } - operator VkPhysicalDeviceVariablePointersFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVariablePointersFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) - && ( variablePointers == rhs.variablePointers ); 
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && + ( variablePointers == rhs.variablePointers ); } - bool operator!=( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -54468,78 +61247,88 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; }; - static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ) , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ ) {} - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & + operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) - + offsetof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & + operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + 
PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT { vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; return *this; } - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT { vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; return *this; } - operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) - && ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) && + ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ); } - bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -54547,57 +61336,67 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; }; - static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == + sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT : maxVertexAttribDivisor( maxVertexAttribDivisor_ ) {} - PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorPropertiesEXT & + operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) - + offsetof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT, pNext ) ); return *this; } - PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorPropertiesEXT & + operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ); } - bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -54605,26 +61404,30 @@ namespace VULKAN_HPP_NAMESPACE public: const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; - void* pNext = {}; - uint32_t maxVertexAttribDivisor = {}; + void * pNext = {}; + uint32_t maxVertexAttribDivisor = {}; }; - static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == + sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceVulkan11Features { - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT : storageBuffer16BitAccess( storageBuffer16BitAccess_ ) , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) , storagePushConstant16( storagePushConstant16_ ) @@ -54641,7 +61444,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVulkan11Features ) - offsetof( PhysicalDeviceVulkan11Features, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceVulkan11Features ) - offsetof( PhysicalDeviceVulkan11Features, pNext ) ); return *this; } @@ -54650,37 +61455,41 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceVulkan11Features& operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceVulkan11Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + 
PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceVulkan11Features & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT { storageBuffer16BitAccess = storageBuffer16BitAccess_; return *this; } - PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; return *this; } - PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT { storagePushConstant16 = storagePushConstant16_; return *this; } - PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT { storageInputOutput16 = storageInputOutput16_; return *this; @@ -54692,121 +61501,129 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceVulkan11Features & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT { multiviewGeometryShader = multiviewGeometryShader_; return *this; } - PhysicalDeviceVulkan11Features & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT { multiviewTessellationShader = multiviewTessellationShader_; return *this; } - PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT { variablePointersStorageBuffer = variablePointersStorageBuffer_; return *this; } - PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT { variablePointers = variablePointers_; return *this; } - PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT { protectedMemory = protectedMemory_; return *this; } - PhysicalDeviceVulkan11Features & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setSamplerYcbcrConversion( 
VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT { samplerYcbcrConversion = samplerYcbcrConversion_; return *this; } - PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT { shaderDrawParameters = shaderDrawParameters_; return *this; } - operator VkPhysicalDeviceVulkan11Features const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVulkan11Features const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan11Features const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) - && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) - && ( storagePushConstant16 == rhs.storagePushConstant16 ) - && ( storageInputOutput16 == rhs.storageInputOutput16 ) - && ( multiview == rhs.multiview ) - && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) - && ( multiviewTessellationShader == rhs.multiviewTessellationShader ) - && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) - && ( variablePointers == rhs.variablePointers ) - && ( protectedMemory == rhs.protectedMemory ) - && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) - && ( shaderDrawParameters == rhs.shaderDrawParameters ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && + ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && + ( storagePushConstant16 == rhs.storagePushConstant16 ) && + ( storageInputOutput16 == rhs.storageInputOutput16 ) && ( multiview == rhs.multiview ) && + ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && + ( multiviewTessellationShader == rhs.multiviewTessellationShader ) && + ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && + ( variablePointers == rhs.variablePointers ) && ( protectedMemory == rhs.protectedMemory ) && + ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) && + ( shaderDrawParameters == rhs.shaderDrawParameters ); } - bool operator!=( PhysicalDeviceVulkan11Features const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; - 
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; - VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; }; - static_assert( sizeof( PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceVulkan11Properties { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( std::array const& deviceUUID_ = {}, - std::array const& driverUUID_ = {}, - std::array const& deviceLUID_ = {}, - uint32_t deviceNodeMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, - uint32_t subgroupSize_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, - VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, - VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, - uint32_t maxMultiviewViewCount_ = {}, - uint32_t maxMultiviewInstanceIndex_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, - uint32_t maxPerSetDescriptors_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( + std::array const & deviceUUID_ = {}, + std::array const & driverUUID_ = {}, + std::array const & deviceLUID_ = {}, + uint32_t deviceNodeMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, + uint32_t subgroupSize_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, + uint32_t maxMultiviewViewCount_ = {}, + uint32_t maxMultiviewInstanceIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, + uint32_t maxPerSetDescriptors_ = {}, + 
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT : deviceUUID( deviceUUID_ ) , driverUUID( driverUUID_ ) , deviceLUID( deviceLUID_ ) @@ -54826,7 +61643,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVulkan11Properties ) - offsetof( PhysicalDeviceVulkan11Properties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceVulkan11Properties ) - offsetof( PhysicalDeviceVulkan11Properties, pNext ) ); return *this; } @@ -54835,47 +61654,41 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceVulkan11Properties& operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceVulkan11Properties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVulkan11Properties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan11Properties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceUUID == rhs.deviceUUID ) - && ( driverUUID == rhs.driverUUID ) - && ( deviceLUID == rhs.deviceLUID ) - && ( deviceNodeMask == rhs.deviceNodeMask ) - && ( deviceLUIDValid == rhs.deviceLUIDValid ) - && ( subgroupSize == rhs.subgroupSize ) - && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) - && ( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) - && ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) - && ( pointClippingBehavior == rhs.pointClippingBehavior ) - && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) - && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) - && ( protectedNoFault == rhs.protectedNoFault ) - && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) - && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && + ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) && + ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) && + ( subgroupSize == rhs.subgroupSize ) && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) && + ( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) && + ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) && + ( pointClippingBehavior == rhs.pointClippingBehavior ) && + ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && + ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) && + ( protectedNoFault == rhs.protectedNoFault ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && + 
( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); } - bool operator!=( PhysicalDeviceVulkan11Properties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -54883,75 +61696,79 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; - uint32_t deviceNodeMask = {}; - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; - uint32_t subgroupSize = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {}; - VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {}; - VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {}; - VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; - uint32_t maxMultiviewViewCount = {}; - uint32_t maxMultiviewInstanceIndex = {}; - VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; - uint32_t maxPerSetDescriptors = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; + uint32_t deviceNodeMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + uint32_t subgroupSize = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {}; + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {}; + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + uint32_t maxMultiviewViewCount = {}; + uint32_t maxMultiviewInstanceIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; + uint32_t maxPerSetDescriptors = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; }; - static_assert( sizeof( PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceVulkan12Features { - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( + VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, + 
VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {} ) VULKAN_HPP_NOEXCEPT : samplerMirrorClampToEdge( samplerMirrorClampToEdge_ ) , drawIndirectCount( drawIndirectCount_ ) , storageBuffer8BitAccess( storageBuffer8BitAccess_ ) @@ -55003,7 +61820,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) 
VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVulkan12Features ) - offsetof( PhysicalDeviceVulkan12Features, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceVulkan12Features ) - offsetof( PhysicalDeviceVulkan12Features, pNext ) ); return *this; } @@ -55012,55 +61831,62 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceVulkan12Features& operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceVulkan12Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceVulkan12Features & setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT { samplerMirrorClampToEdge = samplerMirrorClampToEdge_; return *this; } - PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT { drawIndirectCount = drawIndirectCount_; return *this; } - PhysicalDeviceVulkan12Features & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT { storageBuffer8BitAccess = storageBuffer8BitAccess_; return *this; } - PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; return *this; } - PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT { storagePushConstant8 = storagePushConstant8_; return *this; } - PhysicalDeviceVulkan12Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT { shaderBufferInt64Atomics = shaderBufferInt64Atomics_; return *this; } - PhysicalDeviceVulkan12Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT { shaderSharedInt64Atomics = shaderSharedInt64Atomics_; return *this; @@ -55078,414 +61904,456 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT + 
PhysicalDeviceVulkan12Features & + setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT { descriptorIndexing = descriptorIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing( 
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & 
setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; return *this; } - PhysicalDeviceVulkan12Features & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT { runtimeDescriptorArray = runtimeDescriptorArray_; return *this; } - PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT { samplerFilterMinmax = samplerFilterMinmax_; return *this; } - PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT { scalarBlockLayout = scalarBlockLayout_; return *this; } - PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT { imagelessFramebuffer = imagelessFramebuffer_; return *this; } - PhysicalDeviceVulkan12Features & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) 
VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT { uniformBufferStandardLayout = uniformBufferStandardLayout_; return *this; } - PhysicalDeviceVulkan12Features & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT { shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; return *this; } - PhysicalDeviceVulkan12Features & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT { separateDepthStencilLayouts = separateDepthStencilLayouts_; return *this; } - PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT { hostQueryReset = hostQueryReset_; return *this; } - PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT { timelineSemaphore = timelineSemaphore_; return *this; } - PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT { bufferDeviceAddress = bufferDeviceAddress_; return *this; } - PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; return *this; } - PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; return *this; } - PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT { vulkanMemoryModel = vulkanMemoryModel_; return *this; } - PhysicalDeviceVulkan12Features & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT { vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; return *this; } - PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 
vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains( + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT { vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; return *this; } - PhysicalDeviceVulkan12Features & setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT { shaderOutputViewportIndex = shaderOutputViewportIndex_; return *this; } - PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT { shaderOutputLayer = shaderOutputLayer_; return *this; } - PhysicalDeviceVulkan12Features & setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT { subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_; return *this; } - operator VkPhysicalDeviceVulkan12Features const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVulkan12Features const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan12Features const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) - && ( drawIndirectCount == rhs.drawIndirectCount ) - && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) - && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) - && ( storagePushConstant8 == rhs.storagePushConstant8 ) - && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) - && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) - && ( shaderFloat16 == rhs.shaderFloat16 ) - && ( shaderInt8 == rhs.shaderInt8 ) - && ( descriptorIndexing == rhs.descriptorIndexing ) - && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) - && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) - && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) - && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) - && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) - && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) - && ( shaderStorageImageArrayNonUniformIndexing == 
rhs.shaderStorageImageArrayNonUniformIndexing ) - && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) - && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) - && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) - && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) - && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) - && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) - && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) - && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) - && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) - && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) - && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) - && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) - && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) - && ( samplerFilterMinmax == rhs.samplerFilterMinmax ) - && ( scalarBlockLayout == rhs.scalarBlockLayout ) - && ( imagelessFramebuffer == rhs.imagelessFramebuffer ) - && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) - && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) - && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) - && ( hostQueryReset == rhs.hostQueryReset ) - && ( timelineSemaphore == rhs.timelineSemaphore ) - && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) - && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) - && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) - && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) - && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) - && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) - && ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) - && ( shaderOutputLayer == rhs.shaderOutputLayer ) - && ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) && + ( drawIndirectCount == rhs.drawIndirectCount ) && + ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && + ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && + ( storagePushConstant8 == rhs.storagePushConstant8 ) && + ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && + ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) && ( shaderFloat16 == rhs.shaderFloat16 ) && + ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) && + ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( shaderUniformBufferArrayNonUniformIndexing == 
rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( shaderUniformTexelBufferArrayNonUniformIndexing == + rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == + rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == + rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == + rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) && + ( samplerFilterMinmax == rhs.samplerFilterMinmax ) && ( scalarBlockLayout == rhs.scalarBlockLayout ) && + ( imagelessFramebuffer == rhs.imagelessFramebuffer ) && + ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) && + ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) && + ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) && + ( hostQueryReset == rhs.hostQueryReset ) && ( timelineSemaphore == rhs.timelineSemaphore ) && + ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) && + ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) && + ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) && + ( shaderOutputLayer == rhs.shaderOutputLayer ) && + ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId ); } - bool operator!=( PhysicalDeviceVulkan12Features const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {}; - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; - 
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; - VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {}; - VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; - VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; - VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; - VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; - VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {}; - VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 
descriptorIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {}; + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {}; }; - static_assert( sizeof( PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceVulkan12Properties { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, - std::array const& driverName_ = {}, - std::array const& driverInfo_ = {}, - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, - uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, - uint32_t maxPerStageUpdateAfterBindResources_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ 
= {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, - uint64_t maxTimelineSemaphoreValueDifference_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( + VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, + std::array const & driverName_ = {}, + std::array const & driverInfo_ = {}, + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, + uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, + uint32_t maxPerStageUpdateAfterBindResources_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t 
maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, + uint64_t maxTimelineSemaphoreValueDifference_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {} ) VULKAN_HPP_NOEXCEPT : driverID( driverID_ ) , driverName( driverName_ ) , driverInfo( driverInfo_ ) @@ -55542,7 +62410,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVulkan12Properties ) - offsetof( PhysicalDeviceVulkan12Properties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceVulkan12Properties ) - offsetof( PhysicalDeviceVulkan12Properties, pNext ) ); return *this; } @@ -55551,161 +62421,180 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceVulkan12Properties& operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceVulkan12Properties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVulkan12Properties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan12Properties const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan12Properties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( driverID == rhs.driverID ) - && ( driverName == rhs.driverName ) - && ( driverInfo == rhs.driverInfo ) - && ( conformanceVersion == rhs.conformanceVersion ) - && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) - && ( roundingModeIndependence == rhs.roundingModeIndependence ) - && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) - && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) - && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) - && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) - && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) - && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) - && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) - && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) - && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) - && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) - && ( 
shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) - && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) - && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) - && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) - && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) - && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) - && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) - && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) - && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) - && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) - && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) - && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) - && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) - && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) - && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) - && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) - && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) - && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) - && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) - && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) - && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) - && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) - && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) - && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) - && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) - && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) - && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) - && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) - && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) - && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) - && ( independentResolveNone == rhs.independentResolveNone ) - && ( independentResolve == rhs.independentResolve ) - && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) - && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) - && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) - && ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && + ( 
driverName == rhs.driverName ) && ( driverInfo == rhs.driverInfo ) && + ( conformanceVersion == rhs.conformanceVersion ) && + ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && + ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && + ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && + ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && + ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && + ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && + ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) && + ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == + rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == + rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == + rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == + rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == + rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && + ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindSampledImages == + rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == + rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == + rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == + 
rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == + rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) && + ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && + ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && + ( independentResolveNone == rhs.independentResolveNone ) && + ( independentResolve == rhs.independentResolve ) && + ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && + ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) && + ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) && + ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts ); } - bool operator!=( PhysicalDeviceVulkan12Properties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; - uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod 
= {}; - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; - uint32_t maxPerStageUpdateAfterBindResources = {}; - uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; - uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; - uint64_t maxTimelineSemaphoreValueDifference = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; 
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; + uint32_t maxPerStageUpdateAfterBindResources = {}; + uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + uint64_t maxTimelineSemaphoreValueDifference = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {}; }; - static_assert( sizeof( PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceVulkanMemoryModelFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT : vulkanMemoryModel( vulkanMemoryModel_ ) , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ) , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) {} - PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkanMemoryModelFeatures & + operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) - offsetof( PhysicalDeviceVulkanMemoryModelFeatures, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) - + offsetof( PhysicalDeviceVulkanMemoryModelFeatures, pNext ) ); return *this; } @@ -55714,59 +62603,61 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PhysicalDeviceVulkanMemoryModelFeatures& operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkanMemoryModelFeatures & + operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT { vulkanMemoryModel = vulkanMemoryModel_; return *this; } - PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT { vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; return *this; } - PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains( + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT { vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; return *this; } - operator VkPhysicalDeviceVulkanMemoryModelFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator 
VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
    }
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
-          && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
-          && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) &&
+             ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) &&
+             ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
    }
-    bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
@@ -55774,70 +62665,78 @@ namespace VULKAN_HPP_NAMESPACE
  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
-    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
-    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
  };
-  static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceVulkanMemoryModelFeatures>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) ==
+                   sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVulkanMemoryModelFeatures>::value,
+                 "struct wrapper is not a standard layout!"
); struct PhysicalDeviceYcbcrImageArraysFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT - : ycbcrImageArrays( ycbcrImageArrays_ ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT : ycbcrImageArrays( ycbcrImageArrays_ ) {} - PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceYcbcrImageArraysFeaturesEXT & + operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) - offsetof( PhysicalDeviceYcbcrImageArraysFeaturesEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) - + offsetof( PhysicalDeviceYcbcrImageArraysFeaturesEXT, pNext ) ); return *this; } - PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceYcbcrImageArraysFeaturesEXT & + operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceYcbcrImageArraysFeaturesEXT & + setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT { ycbcrImageArrays = ycbcrImageArrays_; return *this; } - operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( ycbcrImageArrays == rhs.ycbcrImageArrays ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrImageArrays == rhs.ycbcrImageArrays ); } - bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -55845,17 +62744,20 @@ 
namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {}; }; - static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == + sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineCacheCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, - size_t initialDataSize_ = {}, - const void* pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, + size_t initialDataSize_ = {}, + const void * pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , initialDataSize( initialDataSize_ ) , pInitialData( pInitialData_ ) @@ -55872,13 +62774,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineCacheCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -55896,124 +62798,130 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineCacheCreateInfo & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT + PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT { pInitialData = pInitialData_; return *this; } - operator VkPipelineCacheCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCacheCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCacheCreateInfo const & ) const = default; #else - bool operator==( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( initialDataSize == rhs.initialDataSize ) - && ( pInitialData == rhs.pInitialData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( initialDataSize == rhs.initialDataSize ) && ( pInitialData == rhs.pInitialData ); } - bool operator!=( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; - size_t initialDataSize = {}; - const void* pInitialData = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; + size_t initialDataSize = {}; + const void * pInitialData = {}; }; - static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PipelineColorBlendAdvancedStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, - VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated ) + VULKAN_HPP_NOEXCEPT : srcPremultiplied( srcPremultiplied_ ) , dstPremultiplied( dstPremultiplied_ ) , blendOverlap( blendOverlap_ ) {} - PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAdvancedStateCreateInfoEXT & + operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) - offsetof( PipelineColorBlendAdvancedStateCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) - + offsetof( PipelineColorBlendAdvancedStateCreateInfoEXT, pNext ) ); return *this; } - PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAdvancedStateCreateInfoEXT & + operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAdvancedStateCreateInfoEXT & + setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT { srcPremultiplied = srcPremultiplied_; return *this; } - PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( 
VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAdvancedStateCreateInfoEXT & + setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT { dstPremultiplied = dstPremultiplied_; return *this; } - PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAdvancedStateCreateInfoEXT & + setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT { blendOverlap = blendOverlap_; return *this; } - operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcPremultiplied == rhs.srcPremultiplied ) - && ( dstPremultiplied == rhs.dstPremultiplied ) - && ( blendOverlap == rhs.blendOverlap ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) && + ( dstPremultiplied == rhs.dstPremultiplied ) && ( blendOverlap == rhs.blendOverlap ); } - bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -56021,23 +62929,31 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; - VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; - VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; }; - static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == + sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineCompilerControlCreateInfoAMD { - VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( + VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {} ) VULKAN_HPP_NOEXCEPT : compilerControlFlags( compilerControlFlags_ ) {} - PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCompilerControlCreateInfoAMD & + operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineCompilerControlCreateInfoAMD ) - offsetof( PipelineCompilerControlCreateInfoAMD, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineCompilerControlCreateInfoAMD ) - + offsetof( PipelineCompilerControlCreateInfoAMD, pNext ) ); return *this; } @@ -56046,65 +62962,69 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineCompilerControlCreateInfoAMD& operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCompilerControlCreateInfoAMD & + operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineCompilerControlCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT + PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( + VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT { compilerControlFlags = compilerControlFlags_; return *this; } - operator VkPipelineCompilerControlCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCompilerControlCreateInfoAMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCompilerControlCreateInfoAMD const & ) const = default; #else - bool operator==( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( compilerControlFlags == rhs.compilerControlFlags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compilerControlFlags == rhs.compilerControlFlags ); } - bool operator!=( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineCompilerControlCreateInfoAMD; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {}; }; - static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineCoverageModulationStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, - VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, - uint32_t coverageModulationTableCount_ = {}, - const float* pCoverageModulationTable_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, + uint32_t coverageModulationTableCount_ = {}, + const float * pCoverageModulationTable_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , coverageModulationMode( coverageModulationMode_ ) , coverageModulationTableEnable( coverageModulationTableEnable_ ) @@ -56112,84 +63032,93 @@ namespace VULKAN_HPP_NAMESPACE , pCoverageModulationTable( pCoverageModulationTable_ ) {} - PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & + operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineCoverageModulationStateCreateInfoNV ) - offsetof( PipelineCoverageModulationStateCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineCoverageModulationStateCreateInfoNV ) - + offsetof( PipelineCoverageModulationStateCreateInfoNV, pNext ) ); return *this; } - PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & + operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineCoverageModulationStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineCoverageModulationStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + 
PipelineCoverageModulationStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT { coverageModulationMode = coverageModulationMode_; return *this; } - PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT { coverageModulationTableEnable = coverageModulationTableEnable_; return *this; } - PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & + setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT { coverageModulationTableCount = coverageModulationTableCount_; return *this; } - PipelineCoverageModulationStateCreateInfoNV & setPCoverageModulationTable( const float* pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & + setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT { pCoverageModulationTable = pCoverageModulationTable_; return *this; } - operator VkPipelineCoverageModulationStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( coverageModulationMode == rhs.coverageModulationMode ) - && ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) - && ( coverageModulationTableCount == rhs.coverageModulationTableCount ) - && ( pCoverageModulationTable == rhs.pCoverageModulationTable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( coverageModulationMode == rhs.coverageModulationMode ) && + ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) && + ( coverageModulationTableCount == rhs.coverageModulationTableCount ) && + ( pCoverageModulationTable == rhs.pCoverageModulationTable ); } - bool operator!=( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -56197,81 +63126,93 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; - const void* pNext = {}; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {}; - VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone; + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone; VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {}; - uint32_t coverageModulationTableCount = {}; - const float* pCoverageModulationTable = {}; + uint32_t coverageModulationTableCount = {}; + const float * pCoverageModulationTable = {}; }; - static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == + sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineCoverageReductionStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , coverageReductionMode( coverageReductionMode_ ) {} - PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageReductionStateCreateInfoNV & + operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineCoverageReductionStateCreateInfoNV ) - offsetof( PipelineCoverageReductionStateCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineCoverageReductionStateCreateInfoNV ) - + offsetof( PipelineCoverageReductionStateCreateInfoNV, pNext ) ); return *this; } - PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineCoverageReductionStateCreateInfoNV& operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageReductionStateCreateInfoNV & + operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineCoverageReductionStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineCoverageReductionStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageReductionStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT { coverageReductionMode = coverageReductionMode_; return *this; } - operator VkPipelineCoverageReductionStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( coverageReductionMode == rhs.coverageReductionMode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( coverageReductionMode == rhs.coverageReductionMode ); } - bool operator!=( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -56279,87 +63220,99 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; - const void* pNext = {}; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {}; - VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; }; - static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == + sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
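
Not part of the patch: the conversion operators and static_asserts that these hunks merely re-wrap exist so a wrapper can be handed to code expecting the corresponding C struct without a copy. A minimal sketch, assuming vulkan.hpp is available:

#include <vulkan/vulkan.hpp>

// The implicit conversion below goes through
// operator VkPipelineCoverageReductionStateCreateInfoNV const &(), which is a
// reinterpret_cast made legitimate by the size and standard-layout checks above.
const VkPipelineCoverageReductionStateCreateInfoNV *
  asCStruct( const vk::PipelineCoverageReductionStateCreateInfoNV & info )
{
  const VkPipelineCoverageReductionStateCreateInfoNV & cInfo = info;
  return &cInfo;
}
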
); struct PipelineCoverageToColorStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, - uint32_t coverageToColorLocation_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, + uint32_t coverageToColorLocation_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , coverageToColorEnable( coverageToColorEnable_ ) , coverageToColorLocation( coverageToColorLocation_ ) {} - PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV & + operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineCoverageToColorStateCreateInfoNV ) - offsetof( PipelineCoverageToColorStateCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineCoverageToColorStateCreateInfoNV ) - + offsetof( PipelineCoverageToColorStateCreateInfoNV, pNext ) ); return *this; } - PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV & + operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineCoverageToColorStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineCoverageToColorStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV & + setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT { coverageToColorEnable = coverageToColorEnable_; return *this; } - PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV & + setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT { coverageToColorLocation = coverageToColorLocation_; return *this; } - operator VkPipelineCoverageToColorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( 
this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( coverageToColorEnable == rhs.coverageToColorEnable ) - && ( coverageToColorLocation == rhs.coverageToColorLocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( coverageToColorEnable == rhs.coverageToColorEnable ) && + ( coverageToColorLocation == rhs.coverageToColorLocation ); } - bool operator!=( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -56367,18 +63320,22 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {}; - uint32_t coverageToColorLocation = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {}; + uint32_t coverageToColorLocation = {}; }; - static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == + sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineCreationFeedbackEXT { - VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {}, - uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PipelineCreationFeedbackEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {}, + uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , duration( duration_ ) {} @@ -56388,57 +63345,63 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineCreationFeedbackEXT& operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackEXT & operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPipelineCreationFeedbackEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCreationFeedbackEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineCreationFeedbackEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCreationFeedbackEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCreationFeedbackEXT const & ) const = default; #else - bool operator==( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCreationFeedbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) - && ( duration == rhs.duration ); + return ( flags == rhs.flags ) && ( duration == rhs.duration ); } - bool operator!=( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCreationFeedbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags = {}; - uint64_t duration = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags = {}; + uint64_t duration = {}; }; - static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
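
Not part of the patch: PipelineCreationFeedbackEXT is an output struct, which is why it has no setters. A sketch of how it is typically read after pipeline creation; the eValid flag and the nanosecond unit of duration come from the VK_EXT_pipeline_creation_feedback spec.

#include <iostream>
#include <vulkan/vulkan.hpp>

void reportFeedback( const vk::PipelineCreationFeedbackEXT & feedback )
{
  // Only entries flagged as valid may be interpreted; duration is in nanoseconds.
  if ( feedback.flags & vk::PipelineCreationFeedbackFlagBitsEXT::eValid )
  {
    std::cout << "pipeline creation took " << feedback.duration << " ns\n";
  }
}
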
); struct PipelineCreationFeedbackCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = {}, - uint32_t pipelineStageCreationFeedbackCount_ = {}, - VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback_ = {}, + uint32_t pipelineStageCreationFeedbackCount_ = {}, + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineStageCreationFeedbacks_ = {} ) VULKAN_HPP_NOEXCEPT : pPipelineCreationFeedback( pPipelineCreationFeedback_ ) , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ) , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ ) {} - PipelineCreationFeedbackCreateInfoEXT & operator=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackCreateInfoEXT & + operator=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineCreationFeedbackCreateInfoEXT ) - offsetof( PipelineCreationFeedbackCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineCreationFeedbackCreateInfoEXT ) - + offsetof( PipelineCreationFeedbackCreateInfoEXT, pNext ) ); return *this; } @@ -56447,157 +63410,171 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineCreationFeedbackCreateInfoEXT& operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackCreateInfoEXT & + operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineCreationFeedbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineCreationFeedbackCreateInfoEXT & setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackCreateInfoEXT & setPPipelineCreationFeedback( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT { pPipelineCreationFeedback = pPipelineCreationFeedback_; return *this; } - PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackCreateInfoEXT & + setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT { pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_; return *this; } - PipelineCreationFeedbackCreateInfoEXT & setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackCreateInfoEXT & setPPipelineStageCreationFeedbacks( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT { pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_; return *this; } - operator VkPipelineCreationFeedbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCreationFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( 
this ); + return *reinterpret_cast( this ); } operator VkPipelineCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCreationFeedbackCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCreationFeedbackCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) - && ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) - && ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) && + ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) && + ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks ); } - bool operator!=( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback = {}; - uint32_t pipelineStageCreationFeedbackCount = {}; - VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback = {}; + uint32_t pipelineStageCreationFeedbackCount = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineStageCreationFeedbacks = {}; }; - static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
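
Not part of the patch: a sketch of how PipelineCreationFeedbackCreateInfoEXT is wired up. The caller owns the feedback storage and chains this create-info into GraphicsPipelineCreateInfo::pNext (or ComputePipelineCreateInfo::pNext) before creating the pipeline; the constructor argument order matches the hunk above.

#include <array>
#include <vulkan/vulkan.hpp>

vk::PipelineCreationFeedbackCreateInfoEXT
  makeFeedbackChain( vk::PipelineCreationFeedbackEXT &                whole,
                     std::array<vk::PipelineCreationFeedbackEXT, 2> & perStage )
{
  // The pointed-to storage must stay alive until pipeline creation has returned.
  return vk::PipelineCreationFeedbackCreateInfoEXT(
    &whole, static_cast<uint32_t>( perStage.size() ), perStage.data() );
}
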
); struct PipelineDiscardRectangleStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, - uint32_t discardRectangleCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, + uint32_t discardRectangleCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , discardRectangleMode( discardRectangleMode_ ) , discardRectangleCount( discardRectangleCount_ ) , pDiscardRectangles( pDiscardRectangles_ ) {} - PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & + operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) - offsetof( PipelineDiscardRectangleStateCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) - + offsetof( PipelineDiscardRectangleStateCreateInfoEXT, pNext ) ); return *this; } - PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & + operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineDiscardRectangleStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & + setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT { discardRectangleMode = discardRectangleMode_; return *this; } - PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & + setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT { discardRectangleCount = discardRectangleCount_; return *this; } - 
PipelineDiscardRectangleStateCreateInfoEXT & setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & + setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT { pDiscardRectangles = pDiscardRectangles_; return *this; } - operator VkPipelineDiscardRectangleStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( discardRectangleMode == rhs.discardRectangleMode ) - && ( discardRectangleCount == rhs.discardRectangleCount ) - && ( pDiscardRectangles == rhs.pDiscardRectangles ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( discardRectangleMode == rhs.discardRectangleMode ) && + ( discardRectangleCount == rhs.discardRectangleCount ) && ( pDiscardRectangles == rhs.pDiscardRectangles ); } - bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -56605,19 +63582,23 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; - const void* pNext = {}; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive; - uint32_t discardRectangleCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles = {}; + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive; + uint32_t discardRectangleCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {}; }; - static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == + sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
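
Not part of the patch: a sketch for the discard-rectangle state. The resulting struct is chained into GraphicsPipelineCreateInfo::pNext, and the rectangle storage has to outlive pipeline creation because only a pointer is stored.

#include <vulkan/vulkan.hpp>

vk::PipelineDiscardRectangleStateCreateInfoEXT makeDiscardState( const vk::Rect2D & rect )
{
  // One inclusive discard rectangle; 'rect' is referenced, not copied.
  return vk::PipelineDiscardRectangleStateCreateInfoEXT(
    {}, vk::DiscardRectangleModeEXT::eInclusive, 1, &rect );
}
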
); struct PipelineExecutableInfoKHR { VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, - uint32_t executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT + uint32_t executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT : pipeline( pipeline_ ) , executableIndex( executableIndex_ ) {} @@ -56633,13 +63614,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineExecutableInfoKHR& operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineExecutableInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -56657,49 +63638,50 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPipelineExecutableInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineExecutableInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineExecutableInfoKHR const & ) const = default; #else - bool operator==( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipeline == rhs.pipeline ) - && ( executableIndex == rhs.executableIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ) && + ( executableIndex == rhs.executableIndex ); } - bool operator!=( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - uint32_t executableIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + uint32_t executableIndex = {}; }; - static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
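
Not part of the patch: PipelineExecutableInfoKHR (and PipelineInfoKHR further below) feed the queries of VK_KHR_pipeline_executable_properties (vkGetPipelineExecutablePropertiesKHR / vkGetPipelineExecutableStatisticsKHR). The query calls are omitted from this sketch because they require the extension to be enabled and its entry points to be loaded; only the struct setup is shown.

#include <vulkan/vulkan.hpp>

vk::PipelineExecutableInfoKHR selectExecutable( vk::Pipeline pipeline, uint32_t executableIndex )
{
  // The pipeline must have been created with
  // vk::PipelineCreateFlagBits::eCaptureStatisticsKHR for statistics to be reported.
  return vk::PipelineExecutableInfoKHR( pipeline, executableIndex );
}
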
); struct PipelineExecutableInternalRepresentationKHR { - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( std::array const& name_ = {}, - std::array const& description_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, - size_t dataSize_ = {}, - void* pData_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutableInternalRepresentationKHR( std::array const & name_ = {}, + std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, + size_t dataSize_ = {}, + void * pData_ = {} ) VULKAN_HPP_NOEXCEPT : name( name_ ) , description( description_ ) , isText( isText_ ) @@ -56707,48 +63689,50 @@ namespace VULKAN_HPP_NAMESPACE , pData( pData_ ) {} - PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineExecutableInternalRepresentationKHR & + operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineExecutableInternalRepresentationKHR ) - offsetof( PipelineExecutableInternalRepresentationKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineExecutableInternalRepresentationKHR ) - + offsetof( PipelineExecutableInternalRepresentationKHR, pNext ) ); return *this; } - PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineExecutableInternalRepresentationKHR& operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineExecutableInternalRepresentationKHR & + operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPipelineExecutableInternalRepresentationKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineExecutableInternalRepresentationKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineExecutableInternalRepresentationKHR const & ) const = default; #else - bool operator==( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( name == rhs.name ) - && ( description == rhs.description ) - && ( isText == rhs.isText ) - && ( dataSize == rhs.dataSize ) - && ( pData == rhs.pData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && + ( description == rhs.description ) && ( isText == rhs.isText ) && ( dataSize == rhs.dataSize ) && + ( pData == rhs.pData ); } - bool operator!=( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -56756,22 +63740,26 
@@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - VULKAN_HPP_NAMESPACE::Bool32 isText = {}; - size_t dataSize = {}; - void* pData = {}; + VULKAN_HPP_NAMESPACE::Bool32 isText = {}; + size_t dataSize = {}; + void * pData = {}; }; - static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == + sizeof( VkPipelineExecutableInternalRepresentationKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineExecutablePropertiesKHR { - VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, - std::array const& name_ = {}, - std::array const& description_ = {}, - uint32_t subgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, + std::array const & name_ = {}, + std::array const & description_ = {}, + uint32_t subgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT : stages( stages_ ) , name( name_ ) , description( description_ ) @@ -56780,7 +63768,9 @@ namespace VULKAN_HPP_NAMESPACE PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineExecutablePropertiesKHR ) - offsetof( PipelineExecutablePropertiesKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineExecutablePropertiesKHR ) - offsetof( PipelineExecutablePropertiesKHR, pNext ) ); return *this; } @@ -56789,74 +63779,65 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineExecutablePropertiesKHR& operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPipelineExecutablePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineExecutablePropertiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineExecutablePropertiesKHR const & ) const = default; #else - bool operator==( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stages == rhs.stages ) - && ( name == rhs.name ) - && ( description == rhs.description ) - && ( subgroupSize == rhs.subgroupSize ); + return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( name == rhs.name ) && + ( description == rhs.description ) && ( subgroupSize == rhs.subgroupSize ); } - bool operator!=( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - uint32_t subgroupSize = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + uint32_t subgroupSize = {}; }; - static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); union PipelineExecutableStatisticValueKHR { - PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const& rhs ) VULKAN_HPP_NOEXCEPT + PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) ); } - PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) - : b32( b32_ ) - {} + PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) : b32( b32_ ) {} - PipelineExecutableStatisticValueKHR( int64_t i64_ ) - : i64( i64_ ) - {} + PipelineExecutableStatisticValueKHR( int64_t i64_ ) : i64( i64_ ) {} - PipelineExecutableStatisticValueKHR( uint64_t u64_ ) - : u64( u64_ ) - {} + PipelineExecutableStatisticValueKHR( uint64_t u64_ ) : u64( u64_ ) {} - PipelineExecutableStatisticValueKHR( double f64_ ) - : f64( f64_ ) - {} + PipelineExecutableStatisticValueKHR( double f64_ ) : f64( f64_ ) {} PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT { @@ -56882,41 +63863,44 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR & + operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) ); return *this; } - operator VkPipelineExecutableStatisticValueKHR const&() const + operator VkPipelineExecutableStatisticValueKHR const &() const { - return 
*reinterpret_cast(this); + return *reinterpret_cast( this ); } operator VkPipelineExecutableStatisticValueKHR &() { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS VULKAN_HPP_NAMESPACE::Bool32 b32; - int64_t i64; - uint64_t u64; - double f64; + int64_t i64; + uint64_t u64; + double f64; #else VkBool32 b32; - int64_t i64; + int64_t i64; uint64_t u64; - double f64; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + double f64; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; struct PipelineExecutableStatisticKHR { - PipelineExecutableStatisticKHR( std::array const& name_ = {}, - std::array const& description_ = {}, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {} ) VULKAN_HPP_NOEXCEPT + PipelineExecutableStatisticKHR( std::array const & name_ = {}, + std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {} ) + VULKAN_HPP_NOEXCEPT : name( name_ ) , description( description_ ) , format( format_ ) @@ -56925,7 +63909,9 @@ namespace VULKAN_HPP_NAMESPACE PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineExecutableStatisticKHR ) - offsetof( PipelineExecutableStatisticKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineExecutableStatisticKHR ) - offsetof( PipelineExecutableStatisticKHR, pNext ) ); return *this; } @@ -56934,32 +63920,35 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineExecutableStatisticKHR& operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPipelineExecutableStatisticKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32; + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32; VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {}; }; - static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
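
Not part of the patch: the value union above is interpreted through the format member of the enclosing PipelineExecutableStatisticKHR, exactly as in the C API. A sketch:

#include <cstdio>
#include <vulkan/vulkan.hpp>

void printStatistic( const vk::PipelineExecutableStatisticKHR & stat )
{
  // Read the member of the union that matches the reported format.
  switch ( stat.format )
  {
    case vk::PipelineExecutableStatisticFormatKHR::eBool32:
      std::printf( "%s = %s\n", stat.name.data(), stat.value.b32 ? "true" : "false" );
      break;
    case vk::PipelineExecutableStatisticFormatKHR::eInt64:
      std::printf( "%s = %lld\n", stat.name.data(), static_cast<long long>( stat.value.i64 ) );
      break;
    case vk::PipelineExecutableStatisticFormatKHR::eUint64:
      std::printf( "%s = %llu\n", stat.name.data(), static_cast<unsigned long long>( stat.value.u64 ) );
      break;
    case vk::PipelineExecutableStatisticFormatKHR::eFloat64:
      std::printf( "%s = %f\n", stat.name.data(), stat.value.f64 );
      break;
  }
}
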
); + static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineInfoKHR { @@ -56978,13 +63967,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineInfoKHR& operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -56996,36 +63985,34 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPipelineInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineInfoKHR const & ) const = default; #else - bool operator==( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipeline == rhs.pipeline ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ); } - bool operator!=( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; }; static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); @@ -57033,8 +64020,8 @@ namespace VULKAN_HPP_NAMESPACE struct PushConstantRange { VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, - uint32_t offset_ = {}, - uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT + uint32_t offset_ = {}, + uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT : stageFlags( stageFlags_ ) , offset( offset_ ) , size( size_ ) @@ -57045,9 +64032,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PushConstantRange& operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT + PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -57069,27 +64056,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkPushConstantRange const&() const VULKAN_HPP_NOEXCEPT + operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PushConstantRange const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PushConstantRange const & ) const = default; #else - bool operator==( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( stageFlags == rhs.stageFlags ) - && ( offset == rhs.offset ) - && ( size == rhs.size ); + return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size ); } - bool operator!=( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -57097,19 +64082,21 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; - uint32_t offset = {}; - uint32_t size = {}; + uint32_t offset = {}; + uint32_t size = {}; }; - static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" ); + static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PipelineLayoutCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, - uint32_t setLayoutCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}, - uint32_t pushConstantRangeCount_ = {}, - const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, + uint32_t setLayoutCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, + uint32_t pushConstantRangeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , setLayoutCount( setLayoutCount_ ) , pSetLayouts( pSetLayouts_ ) @@ -57128,13 +64115,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -57152,7 +64139,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + PipelineLayoutCreateInfo & + setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT { pSetLayouts = pSetLayouts_; return *this; @@ -57164,66 +64152,67 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + PipelineLayoutCreateInfo & + setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT { pPushConstantRanges = pPushConstantRanges_; return *this; } - operator VkPipelineLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineLayoutCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLayoutCreateInfo const & ) const = default; #else - bool operator==( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( setLayoutCount == rhs.setLayoutCount ) - && ( pSetLayouts == rhs.pSetLayouts ) - && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) - && ( pPushConstantRanges == rhs.pPushConstantRanges ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ) && + ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && + ( pPushConstantRanges == rhs.pPushConstantRanges ); } - bool 
operator!=( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {}; - uint32_t setLayoutCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {}; - uint32_t pushConstantRangeCount = {}; - const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; }; - static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #ifdef VK_ENABLE_BETA_EXTENSIONS struct PipelineLibraryCreateInfoKHR { - VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, - const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {} ) VULKAN_HPP_NOEXCEPT : libraryCount( libraryCount_ ) , pLibraries( pLibraries_ ) {} PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineLibraryCreateInfoKHR ) - offsetof( PipelineLibraryCreateInfoKHR, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( PipelineLibraryCreateInfoKHR ) - offsetof( PipelineLibraryCreateInfoKHR, pNext ) ); return *this; } @@ -57232,13 +64221,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PipelineLibraryCreateInfoKHR& operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineLibraryCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -57250,203 +64239,226 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ ) VULKAN_HPP_NOEXCEPT + PipelineLibraryCreateInfoKHR & + setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT { pLibraries = pLibraries_; return *this; } - operator VkPipelineLibraryCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineLibraryCreateInfoKHR const& ) const = default; -#else - bool operator==( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default; +# else + bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( libraryCount == rhs.libraryCount ) - && ( pLibraries == rhs.pLibraries ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && + ( pLibraries == rhs.pLibraries ); } - bool operator!=( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; - const void* pNext = {}; - uint32_t libraryCount = {}; - const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; + const void * pNext = {}; + uint32_t libraryCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {}; }; - static_assert( sizeof( PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PipelineRasterizationConservativeStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, - float extraPrimitiveOverestimationSize_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, + float extraPrimitiveOverestimationSize_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , conservativeRasterizationMode( conservativeRasterizationMode_ ) , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ ) {} - PipelineRasterizationConservativeStateCreateInfoEXT & operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationConservativeStateCreateInfoEXT & + operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) - offsetof( PipelineRasterizationConservativeStateCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) - + offsetof( PipelineRasterizationConservativeStateCreateInfoEXT, pNext ) ); return *this; } - PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationConservativeStateCreateInfoEXT( + VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineRasterizationConservativeStateCreateInfoEXT& operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationConservativeStateCreateInfoEXT & + operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRasterizationConservativeStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationConservativeStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT { conservativeRasterizationMode = conservativeRasterizationMode_; return *this; } - 
PipelineRasterizationConservativeStateCreateInfoEXT & setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationConservativeStateCreateInfoEXT & + setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT { extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_; return *this; } - operator VkPipelineRasterizationConservativeStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) - && ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) && + ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize ); } - bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled; + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled; float extraPrimitiveOverestimationSize = {}; }; - static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineRasterizationDepthClipStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , depthClipEnable( depthClipEnable_ ) {} - PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationDepthClipStateCreateInfoEXT & + operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) - offsetof( PipelineRasterizationDepthClipStateCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) - + offsetof( PipelineRasterizationDepthClipStateCreateInfoEXT, pNext ) ); return *this; } - PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineRasterizationDepthClipStateCreateInfoEXT& operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationDepthClipStateCreateInfoEXT & + operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRasterizationDepthClipStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationDepthClipStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationDepthClipStateCreateInfoEXT & + setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT { depthClipEnable = depthClipEnable_; return *this; } - operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT 
const & ) const = default; #else - bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( depthClipEnable == rhs.depthClipEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( depthClipEnable == rhs.depthClipEnable ); } - bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -57454,246 +64466,281 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; }; - static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineRasterizationLineStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, - VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, - uint32_t lineStippleFactor_ = {}, - uint16_t lineStipplePattern_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, + uint32_t lineStippleFactor_ = {}, + uint16_t lineStipplePattern_ = {} ) VULKAN_HPP_NOEXCEPT : lineRasterizationMode( lineRasterizationMode_ ) , stippledLineEnable( stippledLineEnable_ ) , lineStippleFactor( lineStippleFactor_ ) , lineStipplePattern( lineStipplePattern_ ) {} - PipelineRasterizationLineStateCreateInfoEXT & operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & + operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationLineStateCreateInfoEXT ) - offsetof( PipelineRasterizationLineStateCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineRasterizationLineStateCreateInfoEXT ) - + offsetof( PipelineRasterizationLineStateCreateInfoEXT, pNext ) ); return *this; } - PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineRasterizationLineStateCreateInfoEXT& operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & + operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT { lineRasterizationMode = lineRasterizationMode_; return *this; } - PipelineRasterizationLineStateCreateInfoEXT & setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & + setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT { stippledLineEnable = stippledLineEnable_; return *this; } - PipelineRasterizationLineStateCreateInfoEXT & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & + setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT { lineStippleFactor = lineStippleFactor_; return *this; } - PipelineRasterizationLineStateCreateInfoEXT & setLineStipplePattern( uint16_t 
lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & + setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT { lineStipplePattern = lineStipplePattern_; return *this; } - operator VkPipelineRasterizationLineStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationLineStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( lineRasterizationMode == rhs.lineRasterizationMode ) - && ( stippledLineEnable == rhs.stippledLineEnable ) - && ( lineStippleFactor == rhs.lineStippleFactor ) - && ( lineStipplePattern == rhs.lineStipplePattern ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( lineRasterizationMode == rhs.lineRasterizationMode ) && + ( stippledLineEnable == rhs.stippledLineEnable ) && ( lineStippleFactor == rhs.lineStippleFactor ) && + ( lineStipplePattern == rhs.lineStipplePattern ); } - bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault; VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {}; - uint32_t lineStippleFactor = {}; - uint16_t lineStipplePattern = {}; + uint32_t lineStippleFactor = {}; + uint16_t lineStipplePattern = {}; }; - static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineRasterizationStateRasterizationOrderAMD { - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT : rasterizationOrder( rasterizationOrder_ ) {} - PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateRasterizationOrderAMD & + operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) - offsetof( PipelineRasterizationStateRasterizationOrderAMD, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineRasterizationStateRasterizationOrderAMD ) - + offsetof( PipelineRasterizationStateRasterizationOrderAMD, pNext ) ); return *this; } - PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateRasterizationOrderAMD & + operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRasterizationStateRasterizationOrderAMD & setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateRasterizationOrderAMD & + setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT { rasterizationOrder = rasterizationOrder_; return *this; } - operator VkPipelineRasterizationStateRasterizationOrderAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default; #else - bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( rasterizationOrder == rhs.rasterizationOrder ); + return ( sType == rhs.sType ) 
&& ( pNext == rhs.pNext ) && ( rasterizationOrder == rhs.rasterizationOrder ); } - bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict; }; - static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == + sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineRasterizationStateStreamCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, - uint32_t rasterizationStream_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, + uint32_t rasterizationStream_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , rasterizationStream( rasterizationStream_ ) {} - PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateStreamCreateInfoEXT & + operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) - offsetof( PipelineRasterizationStateStreamCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) - + offsetof( PipelineRasterizationStateStreamCreateInfoEXT, pNext ) ); return *this; } - PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineRasterizationStateStreamCreateInfoEXT& operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateStreamCreateInfoEXT & + operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRasterizationStateStreamCreateInfoEXT & 
setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateStreamCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateStreamCreateInfoEXT & + setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT { rasterizationStream = rasterizationStream_; return *this; } - operator VkPipelineRasterizationStateStreamCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( rasterizationStream == rhs.rasterizationStream ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( rasterizationStream == rhs.rasterizationStream ); } - bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -57701,274 +64748,316 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; - uint32_t rasterizationStream = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; + uint32_t rasterizationStream = {}; }; - static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == + sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineRepresentativeFragmentTestStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {} ) VULKAN_HPP_NOEXCEPT : representativeFragmentTestEnable( representativeFragmentTestEnable_ ) {} - PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRepresentativeFragmentTestStateCreateInfoNV & + operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) - offsetof( PipelineRepresentativeFragmentTestStateCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) - + offsetof( PipelineRepresentativeFragmentTestStateCreateInfoNV, pNext ) ); return *this; } - PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRepresentativeFragmentTestStateCreateInfoNV( + VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineRepresentativeFragmentTestStateCreateInfoNV& operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRepresentativeFragmentTestStateCreateInfoNV & + operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT { representativeFragmentTestEnable = representativeFragmentTestEnable_; return *this; } - operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext 
== rhs.pNext ) - && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable ); } - bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {}; }; - static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == + sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineSampleLocationsStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT : sampleLocationsEnable( sampleLocationsEnable_ ) , sampleLocationsInfo( sampleLocationsInfo_ ) {} - PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineSampleLocationsStateCreateInfoEXT & + operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) - offsetof( PipelineSampleLocationsStateCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineSampleLocationsStateCreateInfoEXT ) - + offsetof( PipelineSampleLocationsStateCreateInfoEXT, pNext ) ); return *this; } - PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineSampleLocationsStateCreateInfoEXT& operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineSampleLocationsStateCreateInfoEXT & + operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineSampleLocationsStateCreateInfoEXT & 
setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineSampleLocationsStateCreateInfoEXT & + setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsEnable = sampleLocationsEnable_; return *this; } - PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsInfo = sampleLocationsInfo_; return *this; } - operator VkPipelineSampleLocationsStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( sampleLocationsEnable == rhs.sampleLocationsEnable ) - && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sampleLocationsEnable == rhs.sampleLocationsEnable ) && + ( sampleLocationsInfo == rhs.sampleLocationsInfo ); } - bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; }; - static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == + sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( uint32_t requiredSubgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( uint32_t requiredSubgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT : requiredSubgroupSize( requiredSubgroupSize_ ) {} - PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & + operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) - offsetof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) - + offsetof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, pNext ) ); return *this; } - PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( + VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & + operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( requiredSubgroupSize == rhs.requiredSubgroupSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( requiredSubgroupSize == rhs.requiredSubgroupSize ); } - bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + void * pNext = {}; uint32_t 
requiredSubgroupSize = {}; }; - static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == + sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineTessellationDomainOriginStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT - : domainOrigin( domainOrigin_ ) + VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT : domainOrigin( domainOrigin_ ) {} - PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineTessellationDomainOriginStateCreateInfo & + operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) - offsetof( PipelineTessellationDomainOriginStateCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineTessellationDomainOriginStateCreateInfo ) - + offsetof( PipelineTessellationDomainOriginStateCreateInfo, pNext ) ); return *this; } - PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineTessellationDomainOriginStateCreateInfo& operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineTessellationDomainOriginStateCreateInfo & + operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT + PipelineTessellationDomainOriginStateCreateInfo & + setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT { domainOrigin = domainOrigin_; return *this; } - operator VkPipelineTessellationDomainOriginStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default; #else - bool operator==( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( domainOrigin == rhs.domainOrigin ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( domainOrigin == rhs.domainOrigin ); } - bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -57976,11 +65065,15 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft; }; - static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == + sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct VertexInputBindingDivisorDescriptionEXT { @@ -57995,9 +65088,10 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - VertexInputBindingDivisorDescriptionEXT& operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VertexInputBindingDivisorDescriptionEXT & + operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -58013,26 +65107,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkVertexInputBindingDivisorDescriptionEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkVertexInputBindingDivisorDescriptionEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( VertexInputBindingDivisorDescriptionEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDivisorDescriptionEXT const & ) const = default; #else - bool operator==( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( binding == rhs.binding ) - && ( divisor == rhs.divisor ); + return ( binding == rhs.binding ) && ( divisor == rhs.divisor ); } - bool operator!=( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -58042,74 +65135,87 @@ namespace VULKAN_HPP_NAMESPACE uint32_t binding = {}; uint32_t divisor = {}; }; - static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == + sizeof( VkVertexInputBindingDivisorDescriptionEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineVertexInputDivisorStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( + uint32_t vertexBindingDivisorCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ = {} ) + VULKAN_HPP_NOEXCEPT : vertexBindingDivisorCount( vertexBindingDivisorCount_ ) , pVertexBindingDivisors( pVertexBindingDivisors_ ) {} - PipelineVertexInputDivisorStateCreateInfoEXT & operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputDivisorStateCreateInfoEXT & + operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) - offsetof( PipelineVertexInputDivisorStateCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) - + offsetof( PipelineVertexInputDivisorStateCreateInfoEXT, pNext ) ); return *this; } - PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineVertexInputDivisorStateCreateInfoEXT& operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputDivisorStateCreateInfoEXT & + operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputDivisorStateCreateInfoEXT & + setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT { vertexBindingDivisorCount = vertexBindingDivisorCount_; return *this; } - PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ ) + VULKAN_HPP_NOEXCEPT { pVertexBindingDivisors = pVertexBindingDivisors_; return *this; } - operator VkPipelineVertexInputDivisorStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineVertexInputDivisorStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + 
auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) - && ( pVertexBindingDivisors == rhs.pVertexBindingDivisors ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) && + ( pVertexBindingDivisors == rhs.pVertexBindingDivisors ); } - bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -58117,167 +65223,193 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; - const void* pNext = {}; - uint32_t vertexBindingDivisorCount = {}; - const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors = {}; + const void * pNext = {}; + uint32_t vertexBindingDivisorCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors = {}; }; - static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == + sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineViewportCoarseSampleOrderStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, - uint32_t customSampleOrderCount_ = {}, - const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, + uint32_t customSampleOrderCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {} ) VULKAN_HPP_NOEXCEPT : sampleOrderType( sampleOrderType_ ) , customSampleOrderCount( customSampleOrderCount_ ) , pCustomSampleOrders( pCustomSampleOrders_ ) {} - PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) - offsetof( PipelineViewportCoarseSampleOrderStateCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) - + offsetof( PipelineViewportCoarseSampleOrderStateCreateInfoNV, pNext ) ); return *this; } - PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineViewportCoarseSampleOrderStateCreateInfoNV& operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineViewportCoarseSampleOrderStateCreateInfoNV & setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT { sampleOrderType = sampleOrderType_; return *this; } - PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT { customSampleOrderCount = customSampleOrderCount_; return *this; } - PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ 
) VULKAN_HPP_NOEXCEPT { pCustomSampleOrders = pCustomSampleOrders_; return *this; } - operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( sampleOrderType == rhs.sampleOrderType ) - && ( customSampleOrderCount == rhs.customSampleOrderCount ) - && ( pCustomSampleOrders == rhs.pCustomSampleOrders ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleOrderType == rhs.sampleOrderType ) && + ( customSampleOrderCount == rhs.customSampleOrderCount ) && + ( pCustomSampleOrders == rhs.pCustomSampleOrders ); } - bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault; - uint32_t customSampleOrderCount = {}; - const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault; + uint32_t customSampleOrderCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders = {}; }; - static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == + sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineViewportExclusiveScissorStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( + uint32_t exclusiveScissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {} ) VULKAN_HPP_NOEXCEPT : exclusiveScissorCount( exclusiveScissorCount_ ) , pExclusiveScissors( pExclusiveScissors_ ) {} - PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportExclusiveScissorStateCreateInfoNV & + operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) - offsetof( PipelineViewportExclusiveScissorStateCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) - + offsetof( PipelineViewportExclusiveScissorStateCreateInfoNV, pNext ) ); return *this; } - PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineViewportExclusiveScissorStateCreateInfoNV& operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportExclusiveScissorStateCreateInfoNV & + operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportExclusiveScissorStateCreateInfoNV & + setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT { exclusiveScissorCount = exclusiveScissorCount_; return *this; } - PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportExclusiveScissorStateCreateInfoNV & + setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT { pExclusiveScissors = pExclusiveScissors_; return *this; } - operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV 
const & ) const = default; #else - bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( exclusiveScissorCount == rhs.exclusiveScissorCount ) - && ( pExclusiveScissors == rhs.pExclusiveScissors ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( exclusiveScissorCount == rhs.exclusiveScissorCount ) && ( pExclusiveScissors == rhs.pExclusiveScissors ); } - bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -58285,17 +65417,21 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; - const void* pNext = {}; - uint32_t exclusiveScissorCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors = {}; + const void * pNext = {}; + uint32_t exclusiveScissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors = {}; }; - static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == + sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
  );

  struct ShadingRatePaletteNV
  {
-    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = {},
-                                               const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(
+      uint32_t                                                 shadingRatePaletteEntryCount_ = {},
+      const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_   = {} ) VULKAN_HPP_NOEXCEPT
       : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ )
       , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
     {}
@@ -58305,9 +65441,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    ShadingRatePaletteNV& operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VkShadingRatePaletteNV const *>(&rhs);
+      *this = *reinterpret_cast<VkShadingRatePaletteNV const *>( &rhs );
       return *this;
     }
@@ -58317,78 +65453,89 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+    ShadingRatePaletteNV & setPShadingRatePaletteEntries(
+      const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
     {
       pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
       return *this;
     }

-    operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
+      return *reinterpret_cast<const VkShadingRatePaletteNV *>( this );
     }

     operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
+      return *reinterpret_cast<VkShadingRatePaletteNV *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ShadingRatePaletteNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ShadingRatePaletteNV const & ) const = default;
 #else
-    bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
-          && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
+      return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) &&
+             ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
     }

-    bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    uint32_t shadingRatePaletteEntryCount = {};
-    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {};
+    uint32_t                                                 shadingRatePaletteEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries    = {};
   };
-  static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ),
+                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!"
); struct PipelineViewportShadingRateImageStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {} ) VULKAN_HPP_NOEXCEPT : shadingRateImageEnable( shadingRateImageEnable_ ) , viewportCount( viewportCount_ ) , pShadingRatePalettes( pShadingRatePalettes_ ) {} - PipelineViewportShadingRateImageStateCreateInfoNV & operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportShadingRateImageStateCreateInfoNV & + operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) - offsetof( PipelineViewportShadingRateImageStateCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) - + offsetof( PipelineViewportShadingRateImageStateCreateInfoNV, pNext ) ); return *this; } - PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineViewportShadingRateImageStateCreateInfoNV& operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportShadingRateImageStateCreateInfoNV & + operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = + *reinterpret_cast( &rhs ); return *this; } - PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportShadingRateImageStateCreateInfoNV & + setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT { shadingRateImageEnable = shadingRateImageEnable_; return *this; @@ -58400,35 +65547,34 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT { pShadingRatePalettes = pShadingRatePalettes_; return *this; } - operator VkPipelineViewportShadingRateImageStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - 
return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shadingRateImageEnable == rhs.shadingRateImageEnable ) - && ( viewportCount == rhs.viewportCount ) - && ( pShadingRatePalettes == rhs.pShadingRatePalettes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shadingRateImageEnable == rhs.shadingRateImageEnable ) && ( viewportCount == rhs.viewportCount ) && + ( pShadingRatePalettes == rhs.pShadingRatePalettes ); } - bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -58436,20 +65582,28 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {}; - uint32_t viewportCount = {}; - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {}; }; - static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == + sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct ViewportSwizzleNV { - VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT : x( x_ ) , y( y_ ) , z( z_ ) @@ -58461,9 +65615,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT + ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -58491,28 +65645,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkViewportSwizzleNV const&() const VULKAN_HPP_NOEXCEPT + operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ViewportSwizzleNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViewportSwizzleNV const & ) const = default; #else - bool operator==( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( x == rhs.x ) - && ( y == rhs.y ) - && ( z == rhs.z ) - && ( w == rhs.w ); + return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ) && ( w == rhs.w ); } - bool operator!=( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -58524,43 +65675,52 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; }; - static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" ); + static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PipelineViewportSwizzleStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , viewportCount( viewportCount_ ) , pViewportSwizzles( pViewportSwizzles_ ) {} - PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportSwizzleStateCreateInfoNV & + operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) - offsetof( PipelineViewportSwizzleStateCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineViewportSwizzleStateCreateInfoNV ) - + offsetof( PipelineViewportSwizzleStateCreateInfoNV, pNext ) ); return *this; } - PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportSwizzleStateCreateInfoNV & + operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineViewportSwizzleStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportSwizzleStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -58572,35 +65732,33 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportSwizzleStateCreateInfoNV & + setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT { pViewportSwizzles = pViewportSwizzles_; return *this; } - operator VkPipelineViewportSwizzleStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) 
const = default; #else - bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( viewportCount == rhs.viewportCount ) - && ( pViewportSwizzles == rhs.pViewportSwizzles ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( viewportCount == rhs.viewportCount ) && ( pViewportSwizzles == rhs.pViewportSwizzles ); } - bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -58608,18 +65766,20 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {}; - uint32_t viewportCount = {}; - const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles = {}; }; - static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == + sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
  );

  struct ViewportWScalingNV
  {
-    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {},
-                                             float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT
       : xcoeff( xcoeff_ )
       , ycoeff( ycoeff_ )
     {}
@@ -58629,9 +65789,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VkViewportWScalingNV const *>(&rhs);
+      *this = *reinterpret_cast<VkViewportWScalingNV const *>( &rhs );
       return *this;
     }
@@ -58647,26 +65807,25 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkViewportWScalingNV*>( this );
+      return *reinterpret_cast<const VkViewportWScalingNV *>( this );
     }

     operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkViewportWScalingNV*>( this );
+      return *reinterpret_cast<VkViewportWScalingNV *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ViewportWScalingNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ViewportWScalingNV const & ) const = default;
 #else
-    bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( xcoeff == rhs.xcoeff )
-          && ( ycoeff == rhs.ycoeff );
+      return ( xcoeff == rhs.xcoeff ) && ( ycoeff == rhs.ycoeff );
     }

-    bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -58676,43 +65835,52 @@ namespace VULKAN_HPP_NAMESPACE
     float xcoeff = {};
     float ycoeff = {};
   };
-  static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ),
+                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!"
); struct PipelineViewportWScalingStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {} ) VULKAN_HPP_NOEXCEPT : viewportWScalingEnable( viewportWScalingEnable_ ) , viewportCount( viewportCount_ ) , pViewportWScalings( pViewportWScalings_ ) {} - PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportWScalingStateCreateInfoNV & + operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportWScalingStateCreateInfoNV ) - offsetof( PipelineViewportWScalingStateCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( PipelineViewportWScalingStateCreateInfoNV ) - + offsetof( PipelineViewportWScalingStateCreateInfoNV, pNext ) ); return *this; } - PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportWScalingStateCreateInfoNV & + operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineViewportWScalingStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportWScalingStateCreateInfoNV & + setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT { viewportWScalingEnable = viewportWScalingEnable_; return *this; @@ -58724,49 +65892,51 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportWScalingStateCreateInfoNV & + setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT { pViewportWScalings = pViewportWScalings_; return *this; } - operator VkPipelineViewportWScalingStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) 
+ auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( viewportWScalingEnable == rhs.viewportWScalingEnable ) - && ( viewportCount == rhs.viewportCount ) - && ( pViewportWScalings == rhs.pViewportWScalings ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( viewportWScalingEnable == rhs.viewportWScalingEnable ) && ( viewportCount == rhs.viewportCount ) && + ( pViewportWScalings == rhs.pViewportWScalings ); } - bool operator!=( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {}; - uint32_t viewportCount = {}; - const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings = {}; }; - static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == + sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
  );

#ifdef VK_USE_PLATFORM_GGP
  struct PresentFrameTokenGGP
@@ -58786,13 +65956,13 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    PresentFrameTokenGGP& operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VkPresentFrameTokenGGP const *>(&rhs);
+      *this = *reinterpret_cast<VkPresentFrameTokenGGP const *>( &rhs );
       return *this;
     }

-    PresentFrameTokenGGP & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -58804,49 +65974,48 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkPresentFrameTokenGGP const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPresentFrameTokenGGP*>( this );
+      return *reinterpret_cast<const VkPresentFrameTokenGGP *>( this );
     }

     operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPresentFrameTokenGGP*>( this );
+      return *reinterpret_cast<VkPresentFrameTokenGGP *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PresentFrameTokenGGP const& ) const = default;
-#else
-    bool operator==( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PresentFrameTokenGGP const & ) const = default;
+#  else
+    bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( frameToken == rhs.frameToken );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( frameToken == rhs.frameToken );
     }

-    bool operator!=( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
+#  endif

   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP;
-    const void* pNext = {};
-    GgpFrameToken frameToken = {};
+    const VULKAN_HPP_NAMESPACE::StructureType sType      = StructureType::ePresentFrameTokenGGP;
+    const void *                              pNext      = {};
+    GgpFrameToken                             frameToken = {};
   };
-  static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ),
+                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!"
); #endif /*VK_USE_PLATFORM_GGP*/ struct PresentInfoKHR { - VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, - uint32_t swapchainCount_ = {}, - const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ = {}, - const uint32_t* pImageIndices_ = {}, - VULKAN_HPP_NAMESPACE::Result* pResults_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ = {}, + const uint32_t * pImageIndices_ = {}, + VULKAN_HPP_NAMESPACE::Result * pResults_ = {} ) VULKAN_HPP_NOEXCEPT : waitSemaphoreCount( waitSemaphoreCount_ ) , pWaitSemaphores( pWaitSemaphores_ ) , swapchainCount( swapchainCount_ ) @@ -58866,13 +66035,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -58884,7 +66053,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT { pWaitSemaphores = pWaitSemaphores_; return *this; @@ -58896,64 +66065,60 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ ) VULKAN_HPP_NOEXCEPT + PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT { pSwapchains = pSwapchains_; return *this; } - PresentInfoKHR & setPImageIndices( const uint32_t* pImageIndices_ ) VULKAN_HPP_NOEXCEPT + PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT { pImageIndices = pImageIndices_; return *this; } - PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result* pResults_ ) VULKAN_HPP_NOEXCEPT + PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT { pResults = pResults_; return *this; } - operator VkPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PresentInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentInfoKHR const & ) const = default; #else - bool operator==( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphores == rhs.pWaitSemaphores ) - && ( swapchainCount == rhs.swapchainCount ) - && ( pSwapchains == rhs.pSwapchains ) - && ( pImageIndices == rhs.pImageIndices ) - && ( pResults == rhs.pResults ); 
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( swapchainCount == rhs.swapchainCount ) && + ( pSwapchains == rhs.pSwapchains ) && ( pImageIndices == rhs.pImageIndices ) && + ( pResults == rhs.pResults ); } - bool operator!=( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR; - const void* pNext = {}; - uint32_t waitSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {}; - uint32_t swapchainCount = {}; - const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains = {}; - const uint32_t* pImageIndices = {}; - VULKAN_HPP_NAMESPACE::Result* pResults = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains = {}; + const uint32_t * pImageIndices = {}; + VULKAN_HPP_NAMESPACE::Result * pResults = {}; }; static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); @@ -58962,17 +66127,14 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_CONSTEXPR RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, - uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT + uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT : offset( offset_ ) , extent( extent_ ) , layer( layer_ ) {} - explicit RectLayerKHR( Rect2D const& rect2D, - uint32_t layer_ = {} ) - : offset( rect2D.offset ) - , extent( rect2D.extent ) - , layer( layer_ ) + explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} ) + : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ ) {} RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT @@ -58980,9 +66142,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RectLayerKHR& operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -59004,27 +66166,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkRectLayerKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RectLayerKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RectLayerKHR const & ) const = default; #else - bool operator==( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( offset == rhs.offset ) - && ( extent == rhs.extent ) - && ( layer == rhs.layer ); + return ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( layer == rhs.layer ); } - bool operator!=( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( 
RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -59033,15 +66193,16 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::Offset2D offset = {}; VULKAN_HPP_NAMESPACE::Extent2D extent = {}; - uint32_t layer = {}; + uint32_t layer = {}; }; static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PresentRegionKHR { - VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {}, - const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PresentRegionKHR( uint32_t rectangleCount_ = {}, + const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT : rectangleCount( rectangleCount_ ) , pRectangles( pRectangles_ ) {} @@ -59051,9 +66212,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -59063,48 +66224,49 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ ) VULKAN_HPP_NOEXCEPT + PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT { pRectangles = pRectangles_; return *this; } - operator VkPresentRegionKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PresentRegionKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentRegionKHR const & ) const = default; #else - bool operator==( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( rectangleCount == rhs.rectangleCount ) - && ( pRectangles == rhs.pRectangles ); + return ( rectangleCount == rhs.rectangleCount ) && ( pRectangles == rhs.pRectangles ); } - bool operator!=( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t rectangleCount = {}; - const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles = {}; + uint32_t rectangleCount = {}; + const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {}; }; - static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PresentRegionsKHR { - VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {}, - const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PresentRegionsKHR( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT : swapchainCount( swapchainCount_ ) , pRegions( pRegions_ ) {} @@ -59120,13 +66282,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PresentRegionsKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -59138,51 +66300,50 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ ) VULKAN_HPP_NOEXCEPT + PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } - operator VkPresentRegionsKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PresentRegionsKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentRegionsKHR const & ) const = default; #else - bool operator==( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchainCount == rhs.swapchainCount ) - && ( pRegions == rhs.pRegions ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && + ( pRegions == rhs.pRegions ); } - bool operator!=( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR; - const void* pNext = {}; - uint32_t swapchainCount = {}; - const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {}; }; - static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
  );

   struct PresentTimeGOOGLE
   {
-    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {},
+    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_          = {},
                                             uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT
       : presentID( presentID_ )
       , desiredPresentTime( desiredPresentTime_ )
@@ -59193,9 +66354,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>( &rhs );
       return *this;
     }

@@ -59211,42 +66372,43 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkPresentTimeGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPresentTimeGOOGLE*>( this );
+      return *reinterpret_cast<const VkPresentTimeGOOGLE *>( this );
     }

     operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPresentTimeGOOGLE*>( this );
+      return *reinterpret_cast<VkPresentTimeGOOGLE *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PresentTimeGOOGLE const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PresentTimeGOOGLE const & ) const = default;
 #else
-    bool operator==( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( presentID == rhs.presentID )
-          && ( desiredPresentTime == rhs.desiredPresentTime );
+      return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime );
     }

-    bool operator!=( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    uint32_t presentID = {};
+    uint32_t presentID          = {};
     uint64_t desiredPresentTime = {};
   };
-  static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!"
); struct PresentTimesInfoGOOGLE { - VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {}, - const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {} ) VULKAN_HPP_NOEXCEPT : swapchainCount( swapchainCount_ ) , pTimes( pTimes_ ) {} @@ -59262,13 +66424,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PresentTimesInfoGOOGLE & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -59280,46 +66442,45 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ ) VULKAN_HPP_NOEXCEPT + PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT { pTimes = pTimes_; return *this; } - operator VkPresentTimesInfoGOOGLE const&() const VULKAN_HPP_NOEXCEPT + operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PresentTimesInfoGOOGLE const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default; #else - bool operator==( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchainCount == rhs.swapchainCount ) - && ( pTimes == rhs.pTimes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && + ( pTimes == rhs.pTimes ); } - bool operator!=( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE; - const void* pNext = {}; - uint32_t swapchainCount = {}; - const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes = {}; }; - static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" ); + static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
  );

   struct ProtectedSubmitInfo
@@ -59339,13 +66500,13 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>( &rhs );
       return *this;
     }

-    ProtectedSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -59357,46 +66518,46 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkProtectedSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkProtectedSubmitInfo*>( this );
+      return *reinterpret_cast<const VkProtectedSubmitInfo *>( this );
     }

     operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkProtectedSubmitInfo*>( this );
+      return *reinterpret_cast<VkProtectedSubmitInfo *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ProtectedSubmitInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ProtectedSubmitInfo const & ) const = default;
 #else
-    bool operator==( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( protectedSubmit == rhs.protectedSubmit );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedSubmit == rhs.protectedSubmit );
     }

-    bool operator!=( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
+    const VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::eProtectedSubmitInfo;
+    const void *                               pNext           = {};
+    VULKAN_HPP_NAMESPACE::Bool32               protectedSubmit = {};
   };
-  static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!"
); struct QueryPoolCreateInfo { - VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, - uint32_t queryCount_ = {}, - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( + VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, + uint32_t queryCount_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , queryType( queryType_ ) , queryCount( queryCount_ ) @@ -59414,13 +66575,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - QueryPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -59444,57 +66605,56 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + QueryPoolCreateInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT { pipelineStatistics = pipelineStatistics_; return *this; } - operator VkQueryPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueryPoolCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolCreateInfo const & ) const = default; #else - bool operator==( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queryType == rhs.queryType ) - && ( queryCount == rhs.queryCount ) - && ( pipelineStatistics == rhs.pipelineStatistics ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) && + ( pipelineStatistics == rhs.pipelineStatistics ); } - bool operator!=( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; - uint32_t queryCount = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; + 
VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; + uint32_t queryCount = {}; VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; }; - static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct QueryPoolPerformanceCreateInfoKHR { - VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, - uint32_t counterIndexCount_ = {}, - const uint32_t* pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, + uint32_t counterIndexCount_ = {}, + const uint32_t * pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT : queueFamilyIndex( queueFamilyIndex_ ) , counterIndexCount( counterIndexCount_ ) , pCounterIndices( pCounterIndices_ ) @@ -59502,7 +66662,9 @@ namespace VULKAN_HPP_NAMESPACE QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( QueryPoolPerformanceCreateInfoKHR ) - offsetof( QueryPoolPerformanceCreateInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( QueryPoolPerformanceCreateInfoKHR ) - offsetof( QueryPoolPerformanceCreateInfoKHR, pNext ) ); return *this; } @@ -59511,13 +66673,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - QueryPoolPerformanceCreateInfoKHR& operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - QueryPoolPerformanceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -59535,128 +66697,142 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t* pCounterIndices_ ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT { pCounterIndices = pCounterIndices_; return *this; } - operator VkQueryPoolPerformanceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueryPoolPerformanceCreateInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default; #else - bool operator==( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ) - && ( counterIndexCount == rhs.counterIndexCount ) - && ( pCounterIndices == 
rhs.pCounterIndices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( counterIndexCount == rhs.counterIndexCount ) && ( pCounterIndices == rhs.pCounterIndices ); } - bool operator!=( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; - const void* pNext = {}; - uint32_t queueFamilyIndex = {}; - uint32_t counterIndexCount = {}; - const uint32_t* pCounterIndices = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; + const void * pNext = {}; + uint32_t queueFamilyIndex = {}; + uint32_t counterIndexCount = {}; + const uint32_t * pCounterIndices = {}; }; - static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct QueryPoolPerformanceQueryCreateInfoINTEL { - VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT : performanceCountersSampling( performanceCountersSampling_ ) {} - QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & + operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) - offsetof( QueryPoolPerformanceQueryCreateInfoINTEL, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) - + offsetof( QueryPoolPerformanceQueryCreateInfoINTEL, pNext ) ); return *this; } - QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - QueryPoolPerformanceQueryCreateInfoINTEL& operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & + operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( 
VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT { performanceCountersSampling = performanceCountersSampling_; return *this; } - operator VkQueryPoolPerformanceQueryCreateInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default; #else - bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( performanceCountersSampling == rhs.performanceCountersSampling ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( performanceCountersSampling == rhs.performanceCountersSampling ); } - bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; }; - static_assert( sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) == + sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct QueueFamilyCheckpointPropertiesNV { - VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( + VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT : checkpointExecutionStageMask( checkpointExecutionStageMask_ ) {} QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( QueueFamilyCheckpointPropertiesNV ) - offsetof( QueueFamilyCheckpointPropertiesNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( QueueFamilyCheckpointPropertiesNV ) - offsetof( QueueFamilyCheckpointPropertiesNV, pNext ) ); return *this; } @@ -59665,33 +66841,32 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - QueueFamilyCheckpointPropertiesNV& operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkQueueFamilyCheckpointPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueueFamilyCheckpointPropertiesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default; #else - bool operator==( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); } - bool operator!=( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -59699,18 +66874,21 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {}; }; - static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct QueueFamilyProperties { - VULKAN_HPP_CONSTEXPR QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, - uint32_t queueCount_ = {}, - uint32_t timestampValidBits_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, + uint32_t queueCount_ = {}, + uint32_t timestampValidBits_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT : queueFlags( queueFlags_ ) , queueCount( queueCount_ ) , timestampValidBits( timestampValidBits_ ) @@ -59722,51 +66900,52 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - QueueFamilyProperties& operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkQueueFamilyProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueueFamilyProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyProperties const & ) const = default; #else - bool operator==( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( queueFlags == rhs.queueFlags ) - && ( queueCount == rhs.queueCount ) - && ( timestampValidBits == rhs.timestampValidBits ) - && ( minImageTransferGranularity == rhs.minImageTransferGranularity ); + return ( queueFlags == rhs.queueFlags ) && ( queueCount == rhs.queueCount ) && + ( timestampValidBits == rhs.timestampValidBits ) && + ( minImageTransferGranularity == rhs.minImageTransferGranularity ); } - bool operator!=( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {}; - uint32_t queueCount = {}; - uint32_t timestampValidBits = {}; - VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {}; + VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {}; + uint32_t queueCount = {}; + uint32_t timestampValidBits = {}; + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {}; }; - static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" ); + static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct QueueFamilyProperties2 { - VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( + VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {} ) VULKAN_HPP_NOEXCEPT : queueFamilyProperties( queueFamilyProperties_ ) {} @@ -59781,55 +66960,56 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - QueueFamilyProperties2& operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkQueueFamilyProperties2 const&() const VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueueFamilyProperties2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyProperties2 const & ) const = default; #else - bool operator==( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( queueFamilyProperties == rhs.queueFamilyProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyProperties == rhs.queueFamilyProperties ); } - bool operator!=( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2; + void * pNext = {}; VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {}; }; - static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #ifdef VK_ENABLE_BETA_EXTENSIONS struct RayTracingShaderGroupCreateInfoKHR { - VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, - uint32_t generalShader_ = {}, - uint32_t closestHitShader_ = {}, - uint32_t anyHitShader_ = {}, - uint32_t intersectionShader_ = {}, - const void* pShaderGroupCaptureReplayHandle_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + RayTracingShaderGroupCreateInfoKHR( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = {}, + uint32_t closestHitShader_ = {}, + uint32_t anyHitShader_ = {}, + uint32_t intersectionShader_ = {}, + const void * pShaderGroupCaptureReplayHandle_ = {} ) VULKAN_HPP_NOEXCEPT : type( type_ ) , generalShader( generalShader_ ) , closestHitShader( closestHitShader_ ) @@ -59840,7 +67020,9 @@ namespace VULKAN_HPP_NAMESPACE RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( RayTracingShaderGroupCreateInfoKHR ) - offsetof( RayTracingShaderGroupCreateInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( RayTracingShaderGroupCreateInfoKHR ) - offsetof( RayTracingShaderGroupCreateInfoKHR, pNext ) ); return *this; } @@ -59849,19 +67031,21 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RayTracingShaderGroupCreateInfoKHR& operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RayTracingShaderGroupCreateInfoKHR & + operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RayTracingShaderGroupCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + RayTracingShaderGroupCreateInfoKHR & + setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; @@ -59891,86 +67075,92 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RayTracingShaderGroupCreateInfoKHR & setPShaderGroupCaptureReplayHandle( const void* pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT + RayTracingShaderGroupCreateInfoKHR & + setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT { pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_; return *this; } - operator VkRayTracingShaderGroupCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RayTracingShaderGroupCreateInfoKHR const& ) const = default; -#else - bool operator==( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default; +# else + bool operator==( 
RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( generalShader == rhs.generalShader ) - && ( closestHitShader == rhs.closestHitShader ) - && ( anyHitShader == rhs.anyHitShader ) - && ( intersectionShader == rhs.intersectionShader ) - && ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && + ( generalShader == rhs.generalShader ) && ( closestHitShader == rhs.closestHitShader ) && + ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ) && + ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle ); } - bool operator!=( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; - uint32_t generalShader = {}; - uint32_t closestHitShader = {}; - uint32_t anyHitShader = {}; - uint32_t intersectionShader = {}; - const void* pShaderGroupCaptureReplayHandle = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = {}; + uint32_t closestHitShader = {}; + uint32_t anyHitShader = {}; + uint32_t intersectionShader = {}; + const void * pShaderGroupCaptureReplayHandle = {}; }; - static_assert( sizeof( RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS struct RayTracingPipelineInterfaceCreateInfoKHR { - VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPayloadSize_ = {}, + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPayloadSize_ = {}, uint32_t maxAttributeSize_ = {}, - uint32_t maxCallableSize_ = {} ) VULKAN_HPP_NOEXCEPT + uint32_t maxCallableSize_ = {} ) VULKAN_HPP_NOEXCEPT : maxPayloadSize( maxPayloadSize_ ) , maxAttributeSize( maxAttributeSize_ ) , maxCallableSize( maxCallableSize_ ) {} - RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineInterfaceCreateInfoKHR & + operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) - offsetof( RayTracingPipelineInterfaceCreateInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) - + offsetof( RayTracingPipelineInterfaceCreateInfoKHR, pNext ) ); return *this; } - RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - RayTracingPipelineInterfaceCreateInfoKHR& operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineInterfaceCreateInfoKHR & + operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -59994,59 +67184,60 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkRayTracingPipelineInterfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const& ) const = default; -#else - bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxPayloadSize == rhs.maxPayloadSize ) - && ( maxAttributeSize == rhs.maxAttributeSize ) - && ( maxCallableSize == rhs.maxCallableSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPayloadSize == rhs.maxPayloadSize ) && + ( maxAttributeSize == rhs.maxAttributeSize ) && ( maxCallableSize == rhs.maxCallableSize ); } - bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; - const void* pNext = {}; - uint32_t maxPayloadSize = {}; - uint32_t maxAttributeSize = {}; - uint32_t maxCallableSize = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + const void * pNext = {}; + uint32_t maxPayloadSize = {}; + uint32_t maxAttributeSize = {}; + uint32_t maxCallableSize = {}; }; - static_assert( sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) == + sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS struct RayTracingPipelineCreateInfoKHR { - VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, - uint32_t groupCount_ = {}, - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ = {}, - uint32_t maxRecursionDepth_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries_ = {}, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , stageCount( stageCount_ ) , pStages( pStages_ ) @@ -60062,7 +67253,9 @@ namespace VULKAN_HPP_NAMESPACE RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( RayTracingPipelineCreateInfoKHR ) - offsetof( RayTracingPipelineCreateInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( RayTracingPipelineCreateInfoKHR ) - offsetof( RayTracingPipelineCreateInfoKHR, pNext ) ); return *this; } @@ -60071,13 +67264,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RayTracingPipelineCreateInfoKHR& operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RayTracingPipelineCreateInfoKHR & setPNext( const 
void* pNext_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -60095,7 +67288,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RayTracingPipelineCreateInfoKHR & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoKHR & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT { pStages = pStages_; return *this; @@ -60107,7 +67301,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RayTracingPipelineCreateInfoKHR & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoKHR & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT { pGroups = pGroups_; return *this; @@ -60119,13 +67314,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RayTracingPipelineCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & libraries_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoKHR & + setLibraries( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & libraries_ ) VULKAN_HPP_NOEXCEPT { libraries = libraries_; return *this; } - RayTracingPipelineCreateInfoKHR & setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoKHR & setPLibraryInterface( + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT { pLibraryInterface = pLibraryInterface_; return *this; @@ -60137,7 +67334,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoKHR & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; @@ -60149,68 +67347,65 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkRayTracingPipelineCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RayTracingPipelineCreateInfoKHR const& ) const = default; -#else - bool operator==( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default; +# else + bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stageCount == rhs.stageCount ) - && ( pStages == rhs.pStages ) - && ( groupCount == rhs.groupCount ) - && ( pGroups == rhs.pGroups ) - && ( maxRecursionDepth == rhs.maxRecursionDepth ) - && ( libraries == rhs.libraries ) - && ( pLibraryInterface == rhs.pLibraryInterface ) - && ( layout == rhs.layout ) - && ( basePipelineHandle == rhs.basePipelineHandle ) - && ( basePipelineIndex == rhs.basePipelineIndex ); + return ( 
sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( groupCount == rhs.groupCount ) && + ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && + ( libraries == rhs.libraries ) && ( pLibraryInterface == rhs.pLibraryInterface ) && + ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); } - bool operator!=( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - uint32_t stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; - uint32_t groupCount = {}; - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups = {}; - uint32_t maxRecursionDepth = {}; - VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries = {}; - const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups = {}; + uint32_t maxRecursionDepth = {}; + VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries = {}; + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; }; - static_assert( sizeof( RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct RayTracingShaderGroupCreateInfoNV { - VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, - uint32_t generalShader_ = {}, - uint32_t closestHitShader_ = {}, - uint32_t anyHitShader_ = {}, - uint32_t intersectionShader_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = {}, + uint32_t closestHitShader_ = {}, + uint32_t anyHitShader_ = {}, + uint32_t intersectionShader_ = {} ) VULKAN_HPP_NOEXCEPT : type( type_ ) , generalShader( generalShader_ ) , closestHitShader( closestHitShader_ ) @@ -60220,7 +67415,9 @@ namespace VULKAN_HPP_NAMESPACE RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( RayTracingShaderGroupCreateInfoNV ) - offsetof( RayTracingShaderGroupCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( RayTracingShaderGroupCreateInfoNV ) - offsetof( RayTracingShaderGroupCreateInfoNV, pNext ) ); return *this; } @@ -60229,19 +67426,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RayTracingShaderGroupCreateInfoNV& operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RayTracingShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + RayTracingShaderGroupCreateInfoNV & + setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; @@ -60271,59 +67469,59 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkRayTracingShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RayTracingShaderGroupCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default; #else - bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( generalShader == rhs.generalShader ) - && ( closestHitShader == rhs.closestHitShader ) - && ( anyHitShader == rhs.anyHitShader ) - && ( intersectionShader == rhs.intersectionShader ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && + ( generalShader == rhs.generalShader ) && ( closestHitShader == 
rhs.closestHitShader ) && + ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ); } - bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; - uint32_t generalShader = {}; - uint32_t closestHitShader = {}; - uint32_t anyHitShader = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = {}; + uint32_t closestHitShader = {}; + uint32_t anyHitShader = {}; uint32_t intersectionShader = {}; }; - static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct RayTracingPipelineCreateInfoNV { - VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, - uint32_t groupCount_ = {}, - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ = {}, - uint32_t maxRecursionDepth_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , stageCount( stageCount_ ) , pStages( pStages_ ) @@ -60337,7 +67535,9 @@ namespace VULKAN_HPP_NAMESPACE RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( RayTracingPipelineCreateInfoNV ) - offsetof( RayTracingPipelineCreateInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( RayTracingPipelineCreateInfoNV ) - offsetof( RayTracingPipelineCreateInfoNV, pNext ) ); return *this; } @@ -60346,13 +67546,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RayTracingPipelineCreateInfoNV& operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = 
*reinterpret_cast( &rhs ); return *this; } - RayTracingPipelineCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -60370,7 +67570,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT { pStages = pStages_; return *this; @@ -60382,7 +67583,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT { pGroups = pGroups_; return *this; @@ -60400,7 +67602,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoNV & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; @@ -60412,55 +67615,50 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkRayTracingPipelineCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RayTracingPipelineCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default; #else - bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stageCount == rhs.stageCount ) - && ( pStages == rhs.pStages ) - && ( groupCount == rhs.groupCount ) - && ( pGroups == rhs.pGroups ) - && ( maxRecursionDepth == rhs.maxRecursionDepth ) - && ( layout == rhs.layout ) - && ( basePipelineHandle == rhs.basePipelineHandle ) - && ( basePipelineIndex == rhs.basePipelineIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( groupCount == rhs.groupCount ) && + ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); } - bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - uint32_t 
stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; - uint32_t groupCount = {}; - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups = {}; - uint32_t maxRecursionDepth = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups = {}; + uint32_t maxRecursionDepth = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; }; - static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct RefreshCycleDurationGOOGLE { @@ -60473,31 +67671,31 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RefreshCycleDurationGOOGLE& operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkRefreshCycleDurationGOOGLE const&() const VULKAN_HPP_NOEXCEPT + operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RefreshCycleDurationGOOGLE const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default; #else - bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( refreshDuration == rhs.refreshDuration ); } - bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -60506,20 +67704,25 @@ namespace VULKAN_HPP_NAMESPACE public: uint64_t refreshDuration = {}; }; - static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct RenderPassAttachmentBeginInfo { - VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {} ) VULKAN_HPP_NOEXCEPT : attachmentCount( attachmentCount_ ) , pAttachments( pAttachments_ ) {} RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( RenderPassAttachmentBeginInfo ) - offsetof( RenderPassAttachmentBeginInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( RenderPassAttachmentBeginInfo ) - offsetof( RenderPassAttachmentBeginInfo, pNext ) ); return *this; } @@ -60528,13 +67731,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RenderPassAttachmentBeginInfo& operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RenderPassAttachmentBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -60546,55 +67749,57 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT + RenderPassAttachmentBeginInfo & + setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; } - operator VkRenderPassAttachmentBeginInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RenderPassAttachmentBeginInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default; #else - bool operator==( RenderPassAttachmentBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( attachmentCount == rhs.attachmentCount ) - && ( pAttachments == rhs.pAttachments ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ); } - bool operator!=( RenderPassAttachmentBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo; - const void* pNext = {}; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo; + const void * pNext = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * 
pAttachments = {}; }; - static_assert( sizeof( RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct RenderPassBeginInfo { - VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, - VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, - uint32_t clearValueCount_ = {}, - const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, + uint32_t clearValueCount_ = {}, + const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {} ) VULKAN_HPP_NOEXCEPT : renderPass( renderPass_ ) , framebuffer( framebuffer_ ) , renderArea( renderArea_ ) @@ -60613,13 +67818,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -60649,66 +67854,64 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT + RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT { pClearValues = pClearValues_; return *this; } - operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RenderPassBeginInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassBeginInfo const & ) const = default; #else - bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( renderPass == rhs.renderPass ) - && ( framebuffer == rhs.framebuffer ) - && ( renderArea == rhs.renderArea ) - && ( clearValueCount == rhs.clearValueCount ) - && ( pClearValues == rhs.pClearValues ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && + ( framebuffer == rhs.framebuffer ) && ( renderArea == rhs.renderArea ) && + ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues ); } - bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassBeginInfo const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; - VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; - uint32_t clearValueCount = {}; - const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + uint32_t clearValueCount = {}; + const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues = {}; }; - static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SubpassDescription { - VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - uint32_t inputAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ = {}, - uint32_t colorAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {}, - uint32_t preserveAttachmentCount_ = {}, - const uint32_t* pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SubpassDescription( + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pipelineBindPoint( pipelineBindPoint_ ) , inputAttachmentCount( inputAttachmentCount_ ) @@ -60726,9 +67929,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SubpassDescription& operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -60738,7 +67941,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; @@ -60750,7 +67954,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SubpassDescription & 
setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT { pInputAttachments = pInputAttachments_; return *this; @@ -60762,19 +67967,22 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT { pColorAttachments = pColorAttachments_; return *this; } - SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription & setPResolveAttachments( + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT { pResolveAttachments = pResolveAttachments_; return *this; } - SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription & setPDepthStencilAttachment( + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT { pDepthStencilAttachment = pDepthStencilAttachment_; return *this; @@ -60786,69 +67994,68 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SubpassDescription & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT { pPreserveAttachments = pPreserveAttachments_; return *this; } - operator VkSubpassDescription const&() const VULKAN_HPP_NOEXCEPT + operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubpassDescription const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescription const & ) const = default; #else - bool operator==( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( inputAttachmentCount == rhs.inputAttachmentCount ) - && ( pInputAttachments == rhs.pInputAttachments ) - && ( colorAttachmentCount == rhs.colorAttachmentCount ) - && ( pColorAttachments == rhs.pColorAttachments ) - && ( pResolveAttachments == rhs.pResolveAttachments ) - && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) - && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) - && ( pPreserveAttachments == rhs.pPreserveAttachments ); + return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) && + ( pResolveAttachments == rhs.pResolveAttachments ) && + ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && + ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && + ( 
pPreserveAttachments == rhs.pPreserveAttachments ); } - bool operator!=( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - uint32_t inputAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments = {}; - uint32_t colorAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment = {}; - uint32_t preserveAttachmentCount = {}; - const uint32_t* pPreserveAttachments = {}; + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; }; - static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" ); + static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SubpassDependency { - VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = {}, - uint32_t dstSubpass_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SubpassDependency( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT : srcSubpass( srcSubpass_ ) , dstSubpass( dstSubpass_ ) , srcStageMask( srcStageMask_ ) @@ -60863,9 +68070,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SubpassDependency& operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -60911,57 +68118,56 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkSubpassDependency const&() const VULKAN_HPP_NOEXCEPT + operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubpassDependency const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDependency const & ) const = default; #else - bool operator==( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( srcSubpass == rhs.srcSubpass ) - && ( dstSubpass == rhs.dstSubpass ) - && ( srcStageMask == rhs.srcStageMask ) - && ( dstStageMask == rhs.dstStageMask ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( dependencyFlags == rhs.dependencyFlags ); + return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && + ( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) && + ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && + ( dependencyFlags == rhs.dependencyFlags ); } - bool operator!=( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t srcSubpass = {}; - uint32_t dstSubpass = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + 
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; }; - static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" ); + static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct RenderPassCreateInfo { - VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ = {}, - uint32_t subpassCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ = {}, - uint32_t dependencyCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , attachmentCount( attachmentCount_ ) , pAttachments( pAttachments_ ) @@ -60982,13 +68188,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RenderPassCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -61006,7 +68212,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo & + setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; @@ -61018,7 +68225,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo & + setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT { pSubpasses = pSubpasses_; return *this; @@ -61030,71 +68238,69 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo & + setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT { pDependencies = pDependencies_; return *this; } - operator VkRenderPassCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RenderPassCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR 
) + auto operator<=>( RenderPassCreateInfo const & ) const = default; #else - bool operator==( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( attachmentCount == rhs.attachmentCount ) - && ( pAttachments == rhs.pAttachments ) - && ( subpassCount == rhs.subpassCount ) - && ( pSubpasses == rhs.pSubpasses ) - && ( dependencyCount == rhs.dependencyCount ) - && ( pDependencies == rhs.pDependencies ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && + ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && + ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ); } - bool operator!=( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments = {}; - uint32_t subpassCount = {}; - const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses = {}; - uint32_t dependencyCount = {}; - const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {}; }; - static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SubpassDescription2 { - VULKAN_HPP_CONSTEXPR SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - uint32_t viewMask_ = {}, - uint32_t inputAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ = {}, - uint32_t colorAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {}, - uint32_t preserveAttachmentCount_ = {}, - const uint32_t* pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SubpassDescription2( + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t viewMask_ = {}, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pipelineBindPoint( pipelineBindPoint_ ) , viewMask( viewMask_ ) @@ -61119,13 +68325,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SubpassDescription2& operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SubpassDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -61137,7 +68343,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription2 & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; @@ -61155,7 +68362,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SubpassDescription2 & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription2 & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT { pInputAttachments = pInputAttachments_; return *this; @@ -61167,19 +68375,22 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SubpassDescription2 & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription2 & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT { pColorAttachments = pColorAttachments_; return *this; } - SubpassDescription2 & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription2 & setPResolveAttachments( + const 
VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT { pResolveAttachments = pResolveAttachments_; return *this; } - SubpassDescription2 & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription2 & setPDepthStencilAttachment( + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT { pDepthStencilAttachment = pDepthStencilAttachment_; return *this; @@ -61191,75 +68402,71 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SubpassDescription2 & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT { pPreserveAttachments = pPreserveAttachments_; return *this; } - operator VkSubpassDescription2 const&() const VULKAN_HPP_NOEXCEPT + operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubpassDescription2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescription2 const & ) const = default; #else - bool operator==( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( viewMask == rhs.viewMask ) - && ( inputAttachmentCount == rhs.inputAttachmentCount ) - && ( pInputAttachments == rhs.pInputAttachments ) - && ( colorAttachmentCount == rhs.colorAttachmentCount ) - && ( pColorAttachments == rhs.pColorAttachments ) - && ( pResolveAttachments == rhs.pResolveAttachments ) - && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) - && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) - && ( pPreserveAttachments == rhs.pPreserveAttachments ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( viewMask == rhs.viewMask ) && + ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) && + ( pResolveAttachments == rhs.pResolveAttachments ) && + ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && + ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && + ( pPreserveAttachments == rhs.pPreserveAttachments ); } - bool operator!=( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - uint32_t viewMask = {}; - uint32_t inputAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments = {}; - uint32_t colorAttachmentCount = {}; - 
const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment = {}; - uint32_t preserveAttachmentCount = {}; - const uint32_t* pPreserveAttachments = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t viewMask = {}; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; }; - static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SubpassDependency2 { - VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {}, - uint32_t dstSubpass_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, + VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, int32_t viewOffset_ = {} ) VULKAN_HPP_NOEXCEPT : srcSubpass( srcSubpass_ ) , dstSubpass( dstSubpass_ ) @@ -61282,13 +68489,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SubpassDependency2& operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SubpassDependency2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -61330,7 +68537,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + SubpassDependency2 & + setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT { dependencyFlags = dependencyFlags_; return *this; @@ -61342,65 +68550,61 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkSubpassDependency2 const&() const VULKAN_HPP_NOEXCEPT + operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return 
*reinterpret_cast( this ); } operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubpassDependency2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDependency2 const & ) const = default; #else - bool operator==( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcSubpass == rhs.srcSubpass ) - && ( dstSubpass == rhs.dstSubpass ) - && ( srcStageMask == rhs.srcStageMask ) - && ( dstStageMask == rhs.dstStageMask ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( dependencyFlags == rhs.dependencyFlags ) - && ( viewOffset == rhs.viewOffset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubpass == rhs.srcSubpass ) && + ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) && + ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) && + ( viewOffset == rhs.viewOffset ); } - bool operator!=( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2; - const void* pNext = {}; - uint32_t srcSubpass = {}; - uint32_t dstSubpass = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; - int32_t viewOffset = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2; + const void * pNext = {}; + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + int32_t viewOffset = {}; }; - static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct RenderPassCreateInfo2 { - VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ = {}, - uint32_t subpassCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ = {}, - uint32_t dependencyCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ = {}, - uint32_t correlatedViewMaskCount_ = {}, - const uint32_t* pCorrelatedViewMasks_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {}, + uint32_t correlatedViewMaskCount_ = {}, + const uint32_t * pCorrelatedViewMasks_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , attachmentCount( attachmentCount_ ) , pAttachments( pAttachments_ ) @@ -61423,13 +68627,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RenderPassCreateInfo2& operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RenderPassCreateInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -61447,7 +68651,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo2 & + setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; @@ -61459,7 +68664,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo2 & + setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT { pSubpasses = pSubpasses_; return *this; @@ -61471,7 +68677,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo2 & + setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT { pDependencies = pDependencies_; return *this; @@ -61483,118 +68690,121 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT { pCorrelatedViewMasks = pCorrelatedViewMasks_; return *this; } - operator VkRenderPassCreateInfo2 const&() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RenderPassCreateInfo2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreateInfo2 const & ) const = default; #else - bool operator==( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( attachmentCount == rhs.attachmentCount ) - && ( pAttachments == rhs.pAttachments ) - && ( subpassCount == rhs.subpassCount ) - && ( pSubpasses == rhs.pSubpasses ) - && ( dependencyCount == rhs.dependencyCount ) - && ( pDependencies == rhs.pDependencies ) - && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) - && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && + ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && + ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ) && + ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) && + ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks ); } - bool operator!=( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments = {}; - uint32_t subpassCount = {}; - const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses = {}; - uint32_t dependencyCount = {}; - const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies = {}; - uint32_t correlatedViewMaskCount = {}; - const uint32_t* pCorrelatedViewMasks = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {}; + uint32_t correlatedViewMaskCount = {}; + const uint32_t * pCorrelatedViewMasks = {}; }; - static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct RenderPassFragmentDensityMapCreateInfoEXT { - VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( + VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {} ) VULKAN_HPP_NOEXCEPT : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ ) {} - RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassFragmentDensityMapCreateInfoEXT & + operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) - offsetof( RenderPassFragmentDensityMapCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) - + offsetof( RenderPassFragmentDensityMapCreateInfoEXT, pNext ) ); return *this; } - RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - RenderPassFragmentDensityMapCreateInfoEXT& operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassFragmentDensityMapCreateInfoEXT & + operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT + RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( + VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT { fragmentDensityMapAttachment = fragmentDensityMapAttachment_; return *this; } - operator VkRenderPassFragmentDensityMapCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default; #else - bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment ); } - bool operator!=( 
RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -61602,77 +68812,87 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; - const void* pNext = {}; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {}; }; - static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == + sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct RenderPassInputAttachmentAspectCreateInfo { - VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {}, - const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( + uint32_t aspectReferenceCount_ = {}, + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {} ) VULKAN_HPP_NOEXCEPT : aspectReferenceCount( aspectReferenceCount_ ) , pAspectReferences( pAspectReferences_ ) {} - RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassInputAttachmentAspectCreateInfo & + operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( RenderPassInputAttachmentAspectCreateInfo ) - offsetof( RenderPassInputAttachmentAspectCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( RenderPassInputAttachmentAspectCreateInfo ) - + offsetof( RenderPassInputAttachmentAspectCreateInfo, pNext ) ); return *this; } - RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassInputAttachmentAspectCreateInfo & + operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RenderPassInputAttachmentAspectCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT + RenderPassInputAttachmentAspectCreateInfo & + setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT { aspectReferenceCount = aspectReferenceCount_; return *this; } - RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ ) VULKAN_HPP_NOEXCEPT 
+ RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT { pAspectReferences = pAspectReferences_; return *this; } - operator VkRenderPassInputAttachmentAspectCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default; #else - bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( aspectReferenceCount == rhs.aspectReferenceCount ) - && ( pAspectReferences == rhs.pAspectReferences ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aspectReferenceCount == rhs.aspectReferenceCount ) && + ( pAspectReferences == rhs.pAspectReferences ); } - bool operator!=( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -61680,21 +68900,24 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; - const void* pNext = {}; - uint32_t aspectReferenceCount = {}; - const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences = {}; + const void * pNext = {}; + uint32_t aspectReferenceCount = {}; + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {}; }; - static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == + sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct RenderPassMultiviewCreateInfo { - VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {}, - const uint32_t* pViewMasks_ = {}, - uint32_t dependencyCount_ = {}, - const int32_t* pViewOffsets_ = {}, - uint32_t correlationMaskCount_ = {}, - const uint32_t* pCorrelationMasks_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {}, + const uint32_t * pViewMasks_ = {}, + uint32_t dependencyCount_ = {}, + const int32_t * pViewOffsets_ = {}, + uint32_t correlationMaskCount_ = {}, + const uint32_t * pCorrelationMasks_ = {} ) VULKAN_HPP_NOEXCEPT : subpassCount( subpassCount_ ) , pViewMasks( pViewMasks_ ) , dependencyCount( dependencyCount_ ) @@ -61705,7 +68928,9 @@ namespace VULKAN_HPP_NAMESPACE RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( RenderPassMultiviewCreateInfo ) - offsetof( RenderPassMultiviewCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( RenderPassMultiviewCreateInfo ) - offsetof( RenderPassMultiviewCreateInfo, pNext ) ); return *this; } @@ -61714,13 +68939,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RenderPassMultiviewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -61732,7 +68957,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t* pViewMasks_ ) VULKAN_HPP_NOEXCEPT + RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT { pViewMasks = pViewMasks_; return *this; @@ -61744,7 +68969,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t* pViewOffsets_ ) VULKAN_HPP_NOEXCEPT + RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT { pViewOffsets = pViewOffsets_; return *this; @@ -61756,60 +68981,59 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t* pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT + RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT { pCorrelationMasks = pCorrelationMasks_; return *this; } - operator VkRenderPassMultiviewCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RenderPassMultiviewCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default; #else - bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - 
return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( subpassCount == rhs.subpassCount ) - && ( pViewMasks == rhs.pViewMasks ) - && ( dependencyCount == rhs.dependencyCount ) - && ( pViewOffsets == rhs.pViewOffsets ) - && ( correlationMaskCount == rhs.correlationMaskCount ) - && ( pCorrelationMasks == rhs.pCorrelationMasks ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) && + ( pViewMasks == rhs.pViewMasks ) && ( dependencyCount == rhs.dependencyCount ) && + ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) && + ( pCorrelationMasks == rhs.pCorrelationMasks ); } - bool operator!=( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; - const void* pNext = {}; - uint32_t subpassCount = {}; - const uint32_t* pViewMasks = {}; - uint32_t dependencyCount = {}; - const int32_t* pViewOffsets = {}; - uint32_t correlationMaskCount = {}; - const uint32_t* pCorrelationMasks = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; + const void * pNext = {}; + uint32_t subpassCount = {}; + const uint32_t * pViewMasks = {}; + uint32_t dependencyCount = {}; + const int32_t * pViewOffsets = {}; + uint32_t correlationMaskCount = {}; + const uint32_t * pCorrelationMasks = {}; }; - static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);

   struct SubpassSampleLocationsEXT
   {
-    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {},
-                                                    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(
+      uint32_t subpassIndex_ = {},
+      VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
       : subpassIndex( subpassIndex_ )
       , sampleLocationsInfo( sampleLocationsInfo_ )
     {}
@@ -61819,9 +69043,9 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    SubpassSampleLocationsEXT& operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast(&rhs);
+      *this = *reinterpret_cast( &rhs );
       return *this;
     }
@@ -61831,59 +69055,66 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    SubpassSampleLocationsEXT & setSampleLocationsInfo(
+      VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
     {
       sampleLocationsInfo = sampleLocationsInfo_;
       return *this;
     }

-    operator VkSubpassSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast( this );
+      return *reinterpret_cast( this );
     }

     operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast( this );
+      return *reinterpret_cast( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SubpassSampleLocationsEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SubpassSampleLocationsEXT const & ) const = default;
 #else
-    bool operator==( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( subpassIndex == rhs.subpassIndex )
-          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+      return ( subpassIndex == rhs.subpassIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
     }

-    bool operator!=( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    uint32_t subpassIndex = {};
+    uint32_t subpassIndex = {};
     VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
   };
-  static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout::value,
+                 "struct wrapper is not a standard layout!"
); struct RenderPassSampleLocationsBeginInfoEXT { - VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = {}, - uint32_t postSubpassSampleLocationsCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( + uint32_t attachmentInitialSampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {}, + uint32_t postSubpassSampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ) , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ) , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ) , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ ) {} - RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassSampleLocationsBeginInfoEXT & + operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( RenderPassSampleLocationsBeginInfoEXT ) - offsetof( RenderPassSampleLocationsBeginInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( RenderPassSampleLocationsBeginInfoEXT ) - + offsetof( RenderPassSampleLocationsBeginInfoEXT, pNext ) ); return *this; } @@ -61892,66 +69123,71 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassSampleLocationsBeginInfoEXT & + operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RenderPassSampleLocationsBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + RenderPassSampleLocationsBeginInfoEXT & + setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT { attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_; return *this; } - RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT + RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) + VULKAN_HPP_NOEXCEPT { pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_; return *this; } - RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + RenderPassSampleLocationsBeginInfoEXT & + setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT { postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_; return 
*this; } - RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT + RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT { pPostSubpassSampleLocations = pPostSubpassSampleLocations_; return *this; } - operator VkRenderPassSampleLocationsBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default; #else - bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) - && ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) - && ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) - && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) && + ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) && + ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) && + ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations ); } - bool operator!=( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -61959,24 +69195,29 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; - const void* pNext = {}; - uint32_t attachmentInitialSampleLocationsCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations = {}; - uint32_t postSubpassSampleLocationsCount = {}; - const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations = {}; + const void * pNext = {}; + uint32_t attachmentInitialSampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {}; + uint32_t postSubpassSampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {}; }; - static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct RenderPassTransformBeginInfoQCOM { - VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT - : transform( transform_ ) + VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT : transform( transform_ ) {} RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( RenderPassTransformBeginInfoQCOM ) - offsetof( RenderPassTransformBeginInfoQCOM, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( RenderPassTransformBeginInfoQCOM ) - offsetof( RenderPassTransformBeginInfoQCOM, pNext ) ); return *this; } @@ -61985,76 +69226,79 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - RenderPassTransformBeginInfoQCOM& operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RenderPassTransformBeginInfoQCOM & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + RenderPassTransformBeginInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + RenderPassTransformBeginInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; } - operator VkRenderPassTransformBeginInfoQCOM const&() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RenderPassTransformBeginInfoQCOM const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default; #else - bool operator==( RenderPassTransformBeginInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( transform == rhs.transform ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); } - bool operator!=( RenderPassTransformBeginInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = 
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; }; - static_assert( sizeof( RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct SamplerCreateInfo { - VULKAN_HPP_CONSTEXPR SamplerCreateInfo( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, - VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, - VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, - float mipLodBias_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, - float maxAnisotropy_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, - VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, - float minLod_ = {}, - float maxLod_ = {}, - VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, - VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SamplerCreateInfo( + VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + float mipLodBias_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, + float maxAnisotropy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, + VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + float minLod_ = {}, + float maxLod_ = {}, + VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, + VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , magFilter( magFilter_ ) , minFilter( minFilter_ ) @@ -62084,13 +69328,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = 
*reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SamplerCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -62186,85 +69430,81 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT + SamplerCreateInfo & + setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT { unnormalizedCoordinates = unnormalizedCoordinates_; return *this; } - operator VkSamplerCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SamplerCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerCreateInfo const & ) const = default; #else - bool operator==( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( magFilter == rhs.magFilter ) - && ( minFilter == rhs.minFilter ) - && ( mipmapMode == rhs.mipmapMode ) - && ( addressModeU == rhs.addressModeU ) - && ( addressModeV == rhs.addressModeV ) - && ( addressModeW == rhs.addressModeW ) - && ( mipLodBias == rhs.mipLodBias ) - && ( anisotropyEnable == rhs.anisotropyEnable ) - && ( maxAnisotropy == rhs.maxAnisotropy ) - && ( compareEnable == rhs.compareEnable ) - && ( compareOp == rhs.compareOp ) - && ( minLod == rhs.minLod ) - && ( maxLod == rhs.maxLod ) - && ( borderColor == rhs.borderColor ) - && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) && ( mipmapMode == rhs.mipmapMode ) && + ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) && + ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && + ( anisotropyEnable == rhs.anisotropyEnable ) && ( maxAnisotropy == rhs.maxAnisotropy ) && + ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) && + ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && + ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); } - bool operator!=( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; - VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; - VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest; - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; - VULKAN_HPP_NAMESPACE::SamplerAddressMode 
addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; - float mipLodBias = {}; - VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {}; - float maxAnisotropy = {}; - VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {}; - VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; - float minLod = {}; - float maxLod = {}; - VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack; - VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + float mipLodBias = {}; + VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {}; + float maxAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {}; + VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + float minLod = {}; + float maxLod = {}; + VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack; + VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {}; }; - static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SamplerReductionModeCreateInfo { - VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( + VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = + VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage ) VULKAN_HPP_NOEXCEPT : reductionMode( reductionMode_ ) {} SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SamplerReductionModeCreateInfo ) - offsetof( SamplerReductionModeCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SamplerReductionModeCreateInfo ) - offsetof( SamplerReductionModeCreateInfo, pNext ) ); return *this; } @@ -62273,68 +69513,72 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SamplerReductionModeCreateInfo& operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SamplerReductionModeCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT + SamplerReductionModeCreateInfo & + setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT { reductionMode = reductionMode_; return *this; } - operator VkSamplerReductionModeCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SamplerReductionModeCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default; #else - bool operator==( SamplerReductionModeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( reductionMode == rhs.reductionMode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reductionMode == rhs.reductionMode ); } - bool operator!=( SamplerReductionModeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = + VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage; }; - static_assert( sizeof( 
SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct SamplerYcbcrConversionCreateInfo { - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, - VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, - VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, - VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, - VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, - VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, - VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {} ) VULKAN_HPP_NOEXCEPT : format( format_ ) , ycbcrModel( ycbcrModel_ ) , ycbcrRange( ycbcrRange_ ) @@ -62347,7 +69591,9 @@ namespace VULKAN_HPP_NAMESPACE SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SamplerYcbcrConversionCreateInfo ) - offsetof( SamplerYcbcrConversionCreateInfo, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SamplerYcbcrConversionCreateInfo ) - offsetof( SamplerYcbcrConversionCreateInfo, pNext ) ); return *this; } @@ -62356,13 +69602,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SamplerYcbcrConversionCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -62374,31 +69620,36 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionCreateInfo & + 
setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT { ycbcrModel = ycbcrModel_; return *this; } - SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionCreateInfo & + setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT { ycbcrRange = ycbcrRange_; return *this; } - SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionCreateInfo & + setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT { components = components_; return *this; } - SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionCreateInfo & + setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT { xChromaOffset = xChromaOffset_; return *this; } - SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionCreateInfo & + setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT { yChromaOffset = yChromaOffset_; return *this; @@ -62410,104 +69661,108 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionCreateInfo & + setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT { forceExplicitReconstruction = forceExplicitReconstruction_; return *this; } - operator VkSamplerYcbcrConversionCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SamplerYcbcrConversionCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default; #else - bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( ycbcrModel == rhs.ycbcrModel ) - && ( ycbcrRange == rhs.ycbcrRange ) - && ( components == rhs.components ) - && ( xChromaOffset == rhs.xChromaOffset ) - && ( yChromaOffset == rhs.yChromaOffset ) - && ( chromaFilter == rhs.chromaFilter ) - && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && + ( ycbcrModel == rhs.ycbcrModel ) && ( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) && + ( xChromaOffset == rhs.xChromaOffset ) && ( yChromaOffset == rhs.yChromaOffset ) && + ( chromaFilter == rhs.chromaFilter ) && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); } - bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + 
bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; - VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; - VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; - VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; - VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; - VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; - VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {}; }; - static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct SamplerYcbcrConversionImageFormatProperties { - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( + uint32_t combinedImageSamplerDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ ) {} - SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionImageFormatProperties & + operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SamplerYcbcrConversionImageFormatProperties ) - offsetof( SamplerYcbcrConversionImageFormatProperties, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SamplerYcbcrConversionImageFormatProperties ) - + offsetof( SamplerYcbcrConversionImageFormatProperties, pNext ) ); return *this; } - SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - SamplerYcbcrConversionImageFormatProperties& operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionImageFormatProperties & + operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSamplerYcbcrConversionImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SamplerYcbcrConversionImageFormatProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default; #else - bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount ); } - bool operator!=( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -62515,21 +69770,26 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties; - void* pNext = {}; - uint32_t combinedImageSamplerDescriptorCount = {}; + void * pNext = {}; + uint32_t combinedImageSamplerDescriptorCount = {}; }; - static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( 
VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == + sizeof( VkSamplerYcbcrConversionImageFormatProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct SamplerYcbcrConversionInfo { - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {} ) VULKAN_HPP_NOEXCEPT : conversion( conversion_ ) {} SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SamplerYcbcrConversionInfo ) - offsetof( SamplerYcbcrConversionInfo, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( SamplerYcbcrConversionInfo ) - offsetof( SamplerYcbcrConversionInfo, pNext ) ); return *this; } @@ -62538,61 +69798,63 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SamplerYcbcrConversionInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionInfo & + setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT { conversion = conversion_; return *this; } - operator VkSamplerYcbcrConversionInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SamplerYcbcrConversionInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default; #else - bool operator==( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( conversion == rhs.conversion ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conversion == rhs.conversion ); } - bool operator!=( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {}; }; - 
static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout::value,
+                 "struct wrapper is not a standard layout!" );

   struct SemaphoreCreateInfo
   {
-    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR
+      SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
       : flags( flags_ )
     {}
@@ -62607,13 +69869,13 @@ namespace VULKAN_HPP_NAMESPACE
       *this = rhs;
     }

-    SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast(&rhs);
+      *this = *reinterpret_cast( &rhs );
       return *this;
     }

-    SemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -62625,44 +69887,45 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    operator VkSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast( this );
+      return *reinterpret_cast( this );
     }

     operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast( this );
+      return *reinterpret_cast( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SemaphoreCreateInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreCreateInfo const & ) const = default;
 #else
-    bool operator==( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
     }

-    bool operator!=( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
       return !operator==( rhs );
     }
 #endif

   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
-    const void* pNext = {};
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
+    const void * pNext = {};
     VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
   };
-  static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!"
); struct SemaphoreGetFdInfoKHR { VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + VULKAN_HPP_NOEXCEPT : semaphore( semaphore_ ) , handleType( handleType_ ) {} @@ -62678,13 +69941,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SemaphoreGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -62696,60 +69959,65 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + SemaphoreGetFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - operator VkSemaphoreGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SemaphoreGetFdInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default; #else - bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( handleType == rhs.handleType ); } - bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; }; - static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" 
); + static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_WIN32_KHR struct SemaphoreGetWin32HandleInfoKHR { - VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT : semaphore( semaphore_ ) , handleType( handleType_ ) {} SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SemaphoreGetWin32HandleInfoKHR ) - offsetof( SemaphoreGetWin32HandleInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SemaphoreGetWin32HandleInfoKHR ) - offsetof( SemaphoreGetWin32HandleInfoKHR, pNext ) ); return *this; } @@ -62758,13 +70026,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SemaphoreGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -62776,53 +70044,55 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + SemaphoreGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - operator VkSemaphoreGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SemaphoreGetWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( handleType == rhs.handleType ); } - bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT 
{ return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; }; - static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct SemaphoreSignalInfo { VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - uint64_t value_ = {} ) VULKAN_HPP_NOEXCEPT + uint64_t value_ = {} ) VULKAN_HPP_NOEXCEPT : semaphore( semaphore_ ) , value( value_ ) {} @@ -62838,13 +70108,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SemaphoreSignalInfo& operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SemaphoreSignalInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -62862,46 +70132,46 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkSemaphoreSignalInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SemaphoreSignalInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreSignalInfo const & ) const = default; #else - bool operator==( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( value == rhs.value ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( value == rhs.value ); } - bool operator!=( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - uint64_t value = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo; 
+ const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + uint64_t value = {}; }; - static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SemaphoreTypeCreateInfo { - VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, - uint64_t initialValue_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( + VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, + uint64_t initialValue_ = {} ) VULKAN_HPP_NOEXCEPT : semaphoreType( semaphoreType_ ) , initialValue( initialValue_ ) {} @@ -62917,13 +70187,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SemaphoreTypeCreateInfo& operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SemaphoreTypeCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -62941,48 +70211,47 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkSemaphoreTypeCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SemaphoreTypeCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default; #else - bool operator==( SemaphoreTypeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphoreType == rhs.semaphoreType ) - && ( initialValue == rhs.initialValue ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) && + ( initialValue == rhs.initialValue ); } - bool operator!=( SemaphoreTypeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary; - uint64_t initialValue = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary; + uint64_t initialValue = {}; }; - static_assert( sizeof( SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), "struct and wrapper have different size!" 
); + static_assert( sizeof( SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SemaphoreWaitInfo { - VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, - uint32_t semaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ = {}, - const uint64_t* pValues_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, + uint32_t semaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {}, + const uint64_t * pValues_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , semaphoreCount( semaphoreCount_ ) , pSemaphores( pSemaphores_ ) @@ -63000,13 +70269,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SemaphoreWaitInfo& operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SemaphoreWaitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -63024,72 +70293,68 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ ) VULKAN_HPP_NOEXCEPT + SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT { pSemaphores = pSemaphores_; return *this; } - SemaphoreWaitInfo & setPValues( const uint64_t* pValues_ ) VULKAN_HPP_NOEXCEPT + SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT { pValues = pValues_; return *this; } - operator VkSemaphoreWaitInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SemaphoreWaitInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreWaitInfo const & ) const = default; #else - bool operator==( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( semaphoreCount == rhs.semaphoreCount ) - && ( pSemaphores == rhs.pSemaphores ) - && ( pValues == rhs.pValues ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( semaphoreCount == rhs.semaphoreCount ) && ( pSemaphores == rhs.pSemaphores ) && + ( pValues == rhs.pValues ); } - bool operator!=( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {}; - uint32_t semaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores = {}; - const uint64_t* pValues = {}; + const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {}; + uint32_t semaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores = {}; + const uint64_t * pValues = {}; }; - static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SetStateFlagsIndirectCommandNV { - VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT - : data( data_ ) - {} + VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data( data_ ) {} SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - SetStateFlagsIndirectCommandNV& operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -63099,25 +70364,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkSetStateFlagsIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT + operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SetStateFlagsIndirectCommandNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default; #else - bool operator==( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( data == rhs.data ); } - bool operator!=( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -63126,14 +70391,16 @@ namespace VULKAN_HPP_NAMESPACE public: uint32_t data = {}; }; - static_assert( sizeof( SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct ShaderModuleCreateInfo { - VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, - size_t codeSize_ = {}, - const uint32_t* pCode_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, + size_t codeSize_ = {}, + const uint32_t * pCode_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , codeSize( codeSize_ ) , pCode( pCode_ ) @@ -63150,13 +70417,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ShaderModuleCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -63174,106 +70441,110 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ShaderModuleCreateInfo & setPCode( const uint32_t* pCode_ ) VULKAN_HPP_NOEXCEPT + ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT { pCode = pCode_; return *this; } - operator VkShaderModuleCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ShaderModuleCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleCreateInfo const & ) const = default; #else - bool operator==( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( codeSize == rhs.codeSize ) - && ( pCode == rhs.pCode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ); } - bool operator!=( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {}; - size_t codeSize = {}; - const uint32_t* pCode = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {}; + size_t codeSize = {}; + const uint32_t * pCode = {}; }; - static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ShaderModuleValidationCacheCreateInfoEXT { - VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT : validationCache( validationCache_ ) {} - ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderModuleValidationCacheCreateInfoEXT & + operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) - offsetof( ShaderModuleValidationCacheCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( ShaderModuleValidationCacheCreateInfoEXT ) - + offsetof( ShaderModuleValidationCacheCreateInfoEXT, pNext ) ); return *this; } - ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderModuleValidationCacheCreateInfoEXT & + operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT + ShaderModuleValidationCacheCreateInfoEXT & + setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT { validationCache = validationCache_; return *this; } - operator VkShaderModuleValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default; #else - bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( validationCache == rhs.validationCache ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( validationCache == rhs.validationCache ); } - bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -63281,19 +70552,22 @@ namespace 
VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {}; }; - static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == + sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ShaderResourceUsageAMD { - VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {}, - uint32_t numUsedSgprs_ = {}, + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {}, + uint32_t numUsedSgprs_ = {}, uint32_t ldsSizePerLocalWorkGroup_ = {}, - size_t ldsUsageSizeInBytes_ = {}, - size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT + size_t ldsUsageSizeInBytes_ = {}, + size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT : numUsedVgprs( numUsedVgprs_ ) , numUsedSgprs( numUsedSgprs_ ) , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ) @@ -63306,59 +70580,60 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ShaderResourceUsageAMD& operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkShaderResourceUsageAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ShaderResourceUsageAMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderResourceUsageAMD const & ) const = default; #else - bool operator==( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( numUsedVgprs == rhs.numUsedVgprs ) - && ( numUsedSgprs == rhs.numUsedSgprs ) - && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) - && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) - && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); + return ( numUsedVgprs == rhs.numUsedVgprs ) && ( numUsedSgprs == rhs.numUsedSgprs ) && + ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) && + ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) && + ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); } - bool operator!=( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t numUsedVgprs = {}; - uint32_t numUsedSgprs = {}; + uint32_t numUsedVgprs = {}; + uint32_t numUsedSgprs = {}; uint32_t ldsSizePerLocalWorkGroup = {}; - size_t ldsUsageSizeInBytes = {}; - size_t 
scratchMemUsageInBytes = {}; + size_t ldsUsageSizeInBytes = {}; + size_t scratchMemUsageInBytes = {}; }; - static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" ); + static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct ShaderStatisticsInfoAMD { - VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, - VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, - uint32_t numPhysicalVgprs_ = {}, - uint32_t numPhysicalSgprs_ = {}, - uint32_t numAvailableVgprs_ = {}, - uint32_t numAvailableSgprs_ = {}, - std::array const& computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, + VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, + uint32_t numPhysicalVgprs_ = {}, + uint32_t numPhysicalSgprs_ = {}, + uint32_t numAvailableVgprs_ = {}, + uint32_t numAvailableSgprs_ = {}, + std::array const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT : shaderStageMask( shaderStageMask_ ) , resourceUsage( resourceUsage_ ) , numPhysicalVgprs( numPhysicalVgprs_ ) @@ -63373,63 +70648,65 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ShaderStatisticsInfoAMD& operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkShaderStatisticsInfoAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ShaderStatisticsInfoAMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default; #else - bool operator==( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( shaderStageMask == rhs.shaderStageMask ) - && ( resourceUsage == rhs.resourceUsage ) - && ( numPhysicalVgprs == rhs.numPhysicalVgprs ) - && ( numPhysicalSgprs == rhs.numPhysicalSgprs ) - && ( numAvailableVgprs == rhs.numAvailableVgprs ) - && ( numAvailableSgprs == rhs.numAvailableSgprs ) - && ( computeWorkGroupSize == rhs.computeWorkGroupSize ); + return ( shaderStageMask == rhs.shaderStageMask ) && ( resourceUsage == rhs.resourceUsage ) && + ( numPhysicalVgprs == rhs.numPhysicalVgprs ) && ( numPhysicalSgprs == rhs.numPhysicalSgprs ) && + ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == rhs.numAvailableSgprs ) && + ( computeWorkGroupSize == rhs.computeWorkGroupSize ); } - bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {}; - 
VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {}; - uint32_t numPhysicalVgprs = {}; - uint32_t numPhysicalSgprs = {}; - uint32_t numAvailableVgprs = {}; - uint32_t numAvailableSgprs = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {}; + VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {}; + uint32_t numPhysicalVgprs = {}; + uint32_t numPhysicalSgprs = {}; + uint32_t numAvailableVgprs = {}; + uint32_t numAvailableSgprs = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D computeWorkGroupSize = {}; }; - static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" ); + static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SharedPresentSurfaceCapabilitiesKHR { - VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ ) {} - SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SharedPresentSurfaceCapabilitiesKHR & + operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SharedPresentSurfaceCapabilitiesKHR ) - offsetof( SharedPresentSurfaceCapabilitiesKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SharedPresentSurfaceCapabilitiesKHR ) - offsetof( SharedPresentSurfaceCapabilitiesKHR, pNext ) ); return *this; } @@ -63438,33 +70715,33 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SharedPresentSurfaceCapabilitiesKHR& operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SharedPresentSurfaceCapabilitiesKHR & + operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSharedPresentSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default; #else - bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); } - bool operator!=( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( 
SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -63472,17 +70749,20 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {}; }; - static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct SparseImageFormatProperties { - VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, - VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT : aspectMask( aspectMask_ ) , imageGranularity( imageGranularity_ ) , flags( flags_ ) @@ -63493,55 +70773,57 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SparseImageFormatProperties& operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSparseImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT + operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseImageFormatProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageFormatProperties const & ) const = default; #else - bool operator==( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( aspectMask == rhs.aspectMask ) - && ( imageGranularity == rhs.imageGranularity ) - && ( flags == rhs.flags ); + return ( aspectMask == rhs.aspectMask ) && ( imageGranularity == rhs.imageGranularity ) && ( flags == rhs.flags ); } - bool operator!=( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; - VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; + 
VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; }; - static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct SparseImageFormatProperties2 { - VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT : properties( properties_ ) {} SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SparseImageFormatProperties2 ) - offsetof( SparseImageFormatProperties2, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( SparseImageFormatProperties2 ) - offsetof( SparseImageFormatProperties2, pNext ) ); return *this; } @@ -63550,53 +70832,54 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SparseImageFormatProperties2& operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSparseImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT + operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseImageFormatProperties2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageFormatProperties2 const & ) const = default; #else - bool operator==( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( properties == rhs.properties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); } - bool operator!=( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2; + void * pNext = {}; VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {}; }; - static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct SparseImageMemoryRequirements { - VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, - uint32_t imageMipTailFirstLod_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, + uint32_t imageMipTailFirstLod_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT : formatProperties( formatProperties_ ) , imageMipTailFirstLod( imageMipTailFirstLod_ ) , imageMipTailSize( imageMipTailSize_ ) @@ -63609,59 +70892,62 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SparseImageMemoryRequirements& operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSparseImageMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT + operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseImageMemoryRequirements const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryRequirements const & ) const = default; #else - bool operator==( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( formatProperties == rhs.formatProperties ) - && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) - && ( imageMipTailSize == rhs.imageMipTailSize ) - && ( imageMipTailOffset == rhs.imageMipTailOffset ) - && ( imageMipTailStride == rhs.imageMipTailStride ); + return ( formatProperties == rhs.formatProperties ) && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) && + ( imageMipTailSize == rhs.imageMipTailSize ) && ( imageMipTailOffset == rhs.imageMipTailOffset ) && + ( imageMipTailStride == rhs.imageMipTailStride ); } - bool operator!=( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {}; - uint32_t imageMipTailFirstLod = {}; - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {}; - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {}; + uint32_t imageMipTailFirstLod = {}; + VULKAN_HPP_NAMESPACE::DeviceSize 
imageMipTailSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {}; }; - static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct SparseImageMemoryRequirements2 { - VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT : memoryRequirements( memoryRequirements_ ) {} SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SparseImageMemoryRequirements2 ) - offsetof( SparseImageMemoryRequirements2, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SparseImageMemoryRequirements2 ) - offsetof( SparseImageMemoryRequirements2, pNext ) ); return *this; } @@ -63670,58 +70956,63 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SparseImageMemoryRequirements2& operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSparseImageMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT + operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseImageMemoryRequirements2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default; #else - bool operator==( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryRequirements == rhs.memoryRequirements ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements ); } - bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2; + void * pNext = {}; VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {}; }; - static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_GGP struct StreamDescriptorSurfaceCreateInfoGGP { - VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, - GgpStreamDescriptor streamDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, + GgpStreamDescriptor streamDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , streamDescriptor( streamDescriptor_ ) {} - StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + StreamDescriptorSurfaceCreateInfoGGP & + operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( StreamDescriptorSurfaceCreateInfoGGP ) - offsetof( StreamDescriptorSurfaceCreateInfoGGP, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( StreamDescriptorSurfaceCreateInfoGGP ) - + offsetof( StreamDescriptorSurfaceCreateInfoGGP, pNext ) ); return *this; } @@ -63730,87 +71021,90 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - StreamDescriptorSurfaceCreateInfoGGP& operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + StreamDescriptorSurfaceCreateInfoGGP & + operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - StreamDescriptorSurfaceCreateInfoGGP & setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT + StreamDescriptorSurfaceCreateInfoGGP & + setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT + StreamDescriptorSurfaceCreateInfoGGP & + setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT { streamDescriptor = streamDescriptor_; return *this; } - operator VkStreamDescriptorSurfaceCreateInfoGGP const&() const VULKAN_HPP_NOEXCEPT + operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( StreamDescriptorSurfaceCreateInfoGGP const& ) const = default; -#else - bool operator==( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & ) const = default; +# else + bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT { - 
return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( streamDescriptor == rhs.streamDescriptor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( streamDescriptor == rhs.streamDescriptor ); } - bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {}; - GgpStreamDescriptor streamDescriptor = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {}; + GgpStreamDescriptor streamDescriptor = {}; }; - static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_GGP*/ #ifdef VK_ENABLE_BETA_EXTENSIONS struct StridedBufferRegionKHR { - VULKAN_HPP_CONSTEXPR StridedBufferRegionKHR( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_CONSTEXPR StridedBufferRegionKHR( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT : buffer( buffer_ ) , offset( offset_ ) , stride( stride_ ) , size( size_ ) {} - explicit StridedBufferRegionKHR( IndirectCommandsStreamNV const& indirectCommandsStreamNV, + explicit StridedBufferRegionKHR( IndirectCommandsStreamNV const & indirectCommandsStreamNV, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) - : buffer( indirectCommandsStreamNV.buffer ) - , offset( indirectCommandsStreamNV.offset ) - , stride( stride_ ) - , size( size_ ) + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) + : buffer( indirectCommandsStreamNV.buffer ), + offset( indirectCommandsStreamNV.offset ), + stride( stride_ ), + size( size_ ) {} StridedBufferRegionKHR( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT @@ -63818,9 +71112,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - StridedBufferRegionKHR& operator=( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + StridedBufferRegionKHR & operator=( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -63848,52 +71142,51 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkStridedBufferRegionKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkStridedBufferRegionKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkStridedBufferRegionKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( 
StridedBufferRegionKHR const& ) const = default; -#else - bool operator==( StridedBufferRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StridedBufferRegionKHR const & ) const = default; +# else + bool operator==( StridedBufferRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( buffer == rhs.buffer ) - && ( offset == rhs.offset ) - && ( stride == rhs.stride ) - && ( size == rhs.size ); + return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( stride == rhs.stride ) && ( size == rhs.size ); } - bool operator!=( StridedBufferRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( StridedBufferRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; - static_assert( sizeof( StridedBufferRegionKHR ) == sizeof( VkStridedBufferRegionKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( StridedBufferRegionKHR ) == sizeof( VkStridedBufferRegionKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct SubmitInfo { - VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ = {}, - uint32_t commandBufferCount_ = {}, - const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ = {}, - uint32_t signalSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {}, + uint32_t commandBufferCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT : waitSemaphoreCount( waitSemaphoreCount_ ) , pWaitSemaphores( pWaitSemaphores_ ) , pWaitDstStageMask( pWaitDstStageMask_ ) @@ -63914,13 +71207,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SubmitInfo& operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -63932,13 +71225,14 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT { pWaitSemaphores = pWaitSemaphores_; return *this; } - SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + SubmitInfo & + setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * 
pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT { pWaitDstStageMask = pWaitDstStageMask_; return *this; @@ -63950,7 +71244,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT + SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT { pCommandBuffers = pCommandBuffers_; return *this; @@ -63962,61 +71256,57 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT { pSignalSemaphores = pSignalSemaphores_; return *this; } - operator VkSubmitInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubmitInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubmitInfo const & ) const = default; #else - bool operator==( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphores == rhs.pWaitSemaphores ) - && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) - && ( commandBufferCount == rhs.commandBufferCount ) - && ( pCommandBuffers == rhs.pCommandBuffers ) - && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) - && ( pSignalSemaphores == rhs.pSignalSemaphores ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) && + ( commandBufferCount == rhs.commandBufferCount ) && ( pCommandBuffers == rhs.pCommandBuffers ) && + ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && ( pSignalSemaphores == rhs.pSignalSemaphores ); } - bool operator!=( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; - const void* pNext = {}; - uint32_t waitSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {}; - const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask = {}; - uint32_t commandBufferCount = {}; - const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers = {}; - uint32_t signalSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask = {}; + uint32_t commandBufferCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; }; static_assert( sizeof( SubmitInfo ) == sizeof( 
VkSubmitInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SubpassBeginInfo { - VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = + VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT : contents( contents_ ) {} @@ -64031,13 +71321,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SubpassBeginInfo& operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -64049,53 +71339,57 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubpassBeginInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassBeginInfo const & ) const = default; #else - bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( contents == rhs.contents ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( contents == rhs.contents ); } - bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline; }; - static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SubpassDescriptionDepthStencilResolve { - VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, - VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {} ) VULKAN_HPP_NOEXCEPT : depthResolveMode( depthResolveMode_ ) , stencilResolveMode( stencilResolveMode_ ) , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ ) {} - SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassDescriptionDepthStencilResolve & + operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SubpassDescriptionDepthStencilResolve ) - offsetof( SubpassDescriptionDepthStencilResolve, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SubpassDescriptionDepthStencilResolve ) - + offsetof( SubpassDescriptionDepthStencilResolve, pNext ) ); return *this; } @@ -64104,78 +71398,81 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SubpassDescriptionDepthStencilResolve& operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassDescriptionDepthStencilResolve & + operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SubpassDescriptionDepthStencilResolve & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SubpassDescriptionDepthStencilResolve & setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT + SubpassDescriptionDepthStencilResolve & + setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT { depthResolveMode = depthResolveMode_; return *this; } - SubpassDescriptionDepthStencilResolve & setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT + SubpassDescriptionDepthStencilResolve & + setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT { stencilResolveMode = stencilResolveMode_; return *this; } - SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT + SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment( + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT { pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_; return *this; } - operator VkSubpassDescriptionDepthStencilResolve const&() const VULKAN_HPP_NOEXCEPT + operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT { - return 
*reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubpassDescriptionDepthStencilResolve const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default; #else - bool operator==( SubpassDescriptionDepthStencilResolve const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( depthResolveMode == rhs.depthResolveMode ) - && ( stencilResolveMode == rhs.stencilResolveMode ) - && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthResolveMode == rhs.depthResolveMode ) && + ( stencilResolveMode == rhs.stencilResolveMode ) && + ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment ); } - bool operator!=( SubpassDescriptionDepthStencilResolve const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {}; }; - static_assert( sizeof( SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct SubpassEndInfo { - VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT - {} + VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT {} SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -64188,38 +71485,37 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SubpassEndInfo& operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubpassEndInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassEndInfo const & ) const = default; #else - bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); } - bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -64227,24 +71523,26 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo; - const void* pNext = {}; + const void * pNext = {}; }; static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SurfaceCapabilities2EXT { - VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( uint32_t minImageCount_ = {}, - uint32_t maxImageCount_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, - uint32_t maxImageArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( + uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT : minImageCount( minImageCount_ ) , maxImageCount( maxImageCount_ ) , currentExtent( currentExtent_ ) @@ -64269,78 +71567,77 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SurfaceCapabilities2EXT& operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSurfaceCapabilities2EXT const&() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceCapabilities2EXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilities2EXT const & ) const = default; #else - bool operator==( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( minImageCount == rhs.minImageCount ) - && ( maxImageCount == rhs.maxImageCount ) - && ( currentExtent == rhs.currentExtent ) - && ( minImageExtent == rhs.minImageExtent ) - && ( maxImageExtent == rhs.maxImageExtent ) - && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) - && ( supportedTransforms == rhs.supportedTransforms ) - && ( currentTransform == rhs.currentTransform ) - && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) - && ( supportedUsageFlags == rhs.supportedUsageFlags ) - && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) && + ( 
maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) && + ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && + ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( supportedTransforms == rhs.supportedTransforms ) && + ( currentTransform == rhs.currentTransform ) && + ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && + ( supportedUsageFlags == rhs.supportedUsageFlags ) && + ( supportedSurfaceCounters == rhs.supportedSurfaceCounters ); } - bool operator!=( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT; - void* pNext = {}; - uint32_t minImageCount = {}; - uint32_t maxImageCount = {}; - VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; - uint32_t maxImageArrayLayers = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT; + void * pNext = {}; + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {}; }; - static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SurfaceCapabilitiesKHR { - VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( uint32_t minImageCount_ = {}, - uint32_t maxImageCount_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, - uint32_t maxImageArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SurfaceCapabilitiesKHR( uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT : minImageCount( minImageCount_ ) , maxImageCount( maxImageCount_ ) , currentExtent( currentExtent_ ) @@ -64358,63 +71655,62 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SurfaceCapabilitiesKHR& operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceCapabilitiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default; #else - bool operator==( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( minImageCount == rhs.minImageCount ) - && ( maxImageCount == rhs.maxImageCount ) - && ( currentExtent == rhs.currentExtent ) - && ( minImageExtent == rhs.minImageExtent ) - && ( maxImageExtent == rhs.maxImageExtent ) - && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) - && ( supportedTransforms == rhs.supportedTransforms ) - && ( currentTransform == rhs.currentTransform ) - && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) - && ( supportedUsageFlags == rhs.supportedUsageFlags ); + return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && + ( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) && + ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && + ( supportedTransforms == rhs.supportedTransforms ) && ( currentTransform == rhs.currentTransform ) && + ( supportedCompositeAlpha 
== rhs.supportedCompositeAlpha ) && + ( supportedUsageFlags == rhs.supportedUsageFlags ); } - bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t minImageCount = {}; - uint32_t maxImageCount = {}; - VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; - uint32_t maxImageArrayLayers = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; }; - static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SurfaceCapabilities2KHR { - VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {} ) VULKAN_HPP_NOEXCEPT : surfaceCapabilities( surfaceCapabilities_ ) {} @@ -64429,121 +71725,132 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SurfaceCapabilities2KHR& operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSurfaceCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceCapabilities2KHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilities2KHR const & ) const = default; #else - bool operator==( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( surfaceCapabilities == rhs.surfaceCapabilities ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCapabilities == rhs.surfaceCapabilities ); } - bool operator!=( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR; - void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR; + void * pNext = {}; VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {}; }; - static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #ifdef VK_USE_PLATFORM_WIN32_KHR struct SurfaceCapabilitiesFullScreenExclusiveEXT { - VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( + VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {} ) VULKAN_HPP_NOEXCEPT : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ ) {} - SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilitiesFullScreenExclusiveEXT & + operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) - offsetof( SurfaceCapabilitiesFullScreenExclusiveEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) - + offsetof( SurfaceCapabilitiesFullScreenExclusiveEXT, pNext ) ); return *this; } - SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - SurfaceCapabilitiesFullScreenExclusiveEXT& operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilitiesFullScreenExclusiveEXT & + operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SurfaceCapabilitiesFullScreenExclusiveEXT & setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilitiesFullScreenExclusiveEXT & + setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT { fullScreenExclusiveSupported = fullScreenExclusiveSupported_; return *this; } - operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const& ) const = default; -#else - bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default; +# else + bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported ); } - bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {}; }; - static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == + sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct SurfaceFormatKHR { - VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = + VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT : format( format_ ) , colorSpace( colorSpace_ ) {} @@ -64553,47 +71860,48 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SurfaceFormatKHR& operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSurfaceFormatKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceFormatKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFormatKHR const & ) const = default; #else - bool operator==( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( format == rhs.format ) - && ( colorSpace == rhs.colorSpace ); + return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace ); } - bool operator!=( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; }; - static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), + "struct and wrapper have different size!" 
); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SurfaceFormat2KHR { - VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {} ) VULKAN_HPP_NOEXCEPT : surfaceFormat( surfaceFormat_ ) {} @@ -64608,56 +71916,59 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SurfaceFormat2KHR& operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSurfaceFormat2KHR const&() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceFormat2KHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFormat2KHR const & ) const = default; #else - bool operator==( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( surfaceFormat == rhs.surfaceFormat ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat ); } - bool operator!=( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {}; }; - static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #ifdef VK_USE_PLATFORM_WIN32_KHR struct SurfaceFullScreenExclusiveInfoEXT { - VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT : fullScreenExclusive( fullScreenExclusive_ ) {} SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SurfaceFullScreenExclusiveInfoEXT ) - offsetof( SurfaceFullScreenExclusiveInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SurfaceFullScreenExclusiveInfoEXT ) - offsetof( SurfaceFullScreenExclusiveInfoEXT, pNext ) ); return *this; } @@ -64666,57 +71977,59 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SurfaceFullScreenExclusiveInfoEXT& operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SurfaceFullScreenExclusiveInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveInfoEXT & + setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT { fullScreenExclusive = fullScreenExclusive_; return *this; } - operator VkSurfaceFullScreenExclusiveInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const& ) const = default; -#else - bool operator==( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fullScreenExclusive == rhs.fullScreenExclusive ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusive == rhs.fullScreenExclusive ); } - bool operator!=( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault; + const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eSurfaceFullScreenExclusiveInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault; }; - static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR @@ -64726,9 +72039,13 @@ namespace VULKAN_HPP_NAMESPACE : hmonitor( hmonitor_ ) {} - SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveWin32InfoEXT & + operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) - offsetof( SurfaceFullScreenExclusiveWin32InfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) - + offsetof( SurfaceFullScreenExclusiveWin32InfoEXT, pNext ) ); return *this; } @@ -64737,13 +72054,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SurfaceFullScreenExclusiveWin32InfoEXT& operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveWin32InfoEXT & + operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -64755,50 +72073,53 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkSurfaceFullScreenExclusiveWin32InfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const& ) const = default; -#else - bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( hmonitor == rhs.hmonitor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hmonitor == rhs.hmonitor ); } - bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; - const void* pNext = {}; - HMONITOR hmonitor = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + const void * pNext = {}; + HMONITOR hmonitor = {}; }; - static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct SurfaceProtectedCapabilitiesKHR { - VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {} ) VULKAN_HPP_NOEXCEPT : supportsProtected( supportsProtected_ ) {} SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SurfaceProtectedCapabilitiesKHR ) - offsetof( SurfaceProtectedCapabilitiesKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SurfaceProtectedCapabilitiesKHR ) - offsetof( SurfaceProtectedCapabilitiesKHR, pNext ) ); return *this; } @@ -64807,67 +72128,71 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SurfaceProtectedCapabilitiesKHR& operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SurfaceProtectedCapabilitiesKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SurfaceProtectedCapabilitiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT + SurfaceProtectedCapabilitiesKHR & + setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT { supportsProtected = supportsProtected_; return *this; } - operator VkSurfaceProtectedCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceProtectedCapabilitiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default; #else - bool operator==( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( supportsProtected == rhs.supportsProtected ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsProtected == rhs.supportsProtected ); } - bool 
operator!=( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {}; }; - static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct SwapchainCounterCreateInfoEXT { - VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT : surfaceCounters( surfaceCounters_ ) {} SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SwapchainCounterCreateInfoEXT ) - offsetof( SwapchainCounterCreateInfoEXT, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SwapchainCounterCreateInfoEXT ) - offsetof( SwapchainCounterCreateInfoEXT, pNext ) ); return *this; } @@ -64876,76 +72201,80 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SwapchainCounterCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SwapchainCounterCreateInfoEXT & setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT + SwapchainCounterCreateInfoEXT & + setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT { surfaceCounters = surfaceCounters_; return *this; } - operator VkSwapchainCounterCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SwapchainCounterCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default; #else - bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( surfaceCounters == rhs.surfaceCounters ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCounters == rhs.surfaceCounters ); } - bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; - const void* pNext = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {}; }; - static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct SwapchainCreateInfoKHR { - VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, - uint32_t minImageCount_ = {}, - VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, - VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, - uint32_t imageArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t* pQueueFamilyIndices_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, - VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, - VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, - VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, + uint32_t minImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, + uint32_t imageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = 
VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , surface( surface_ ) , minImageCount( minImageCount_ ) @@ -64975,13 +72304,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -65011,7 +72340,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT + SwapchainCreateInfoKHR & + setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT { imageColorSpace = imageColorSpace_; return *this; @@ -65035,7 +72365,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT + SwapchainCreateInfoKHR & + setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT { imageSharingMode = imageSharingMode_; return *this; @@ -65047,19 +72378,21 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; } - SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT + SwapchainCreateInfoKHR & + setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT { preTransform = preTransform_; return *this; } - SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT + SwapchainCreateInfoKHR & + setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT { compositeAlpha = compositeAlpha_; return *this; @@ -65083,79 +72416,76 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SwapchainCreateInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainCreateInfoKHR const & ) const = default; #else - bool operator==( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( surface == rhs.surface ) - && ( minImageCount == rhs.minImageCount ) - && ( imageFormat == rhs.imageFormat ) - 
&& ( imageColorSpace == rhs.imageColorSpace ) - && ( imageExtent == rhs.imageExtent ) - && ( imageArrayLayers == rhs.imageArrayLayers ) - && ( imageUsage == rhs.imageUsage ) - && ( imageSharingMode == rhs.imageSharingMode ) - && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) - && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) - && ( preTransform == rhs.preTransform ) - && ( compositeAlpha == rhs.compositeAlpha ) - && ( presentMode == rhs.presentMode ) - && ( clipped == rhs.clipped ) - && ( oldSwapchain == rhs.oldSwapchain ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( surface == rhs.surface ) && + ( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) && + ( imageColorSpace == rhs.imageColorSpace ) && ( imageExtent == rhs.imageExtent ) && + ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) && + ( imageSharingMode == rhs.imageSharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) && + ( compositeAlpha == rhs.compositeAlpha ) && ( presentMode == rhs.presentMode ) && + ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain ); } - bool operator!=( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; - uint32_t minImageCount = {}; - VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; - VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; - uint32_t imageArrayLayers = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; - VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; - uint32_t queueFamilyIndexCount = {}; - const uint32_t* pQueueFamilyIndices = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque; - VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; - VULKAN_HPP_NAMESPACE::Bool32 clipped = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + uint32_t minImageCount = {}; + VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + uint32_t imageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + 
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; + VULKAN_HPP_NAMESPACE::Bool32 clipped = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {}; }; - static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SwapchainDisplayNativeHdrCreateInfoAMD { - VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {} ) VULKAN_HPP_NOEXCEPT - : localDimmingEnable( localDimmingEnable_ ) + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {} ) + VULKAN_HPP_NOEXCEPT : localDimmingEnable( localDimmingEnable_ ) {} - SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainDisplayNativeHdrCreateInfoAMD & + operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) - offsetof( SwapchainDisplayNativeHdrCreateInfoAMD, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) - + offsetof( SwapchainDisplayNativeHdrCreateInfoAMD, pNext ) ); return *this; } @@ -65164,45 +72494,45 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - SwapchainDisplayNativeHdrCreateInfoAMD& operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainDisplayNativeHdrCreateInfoAMD & + operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT + SwapchainDisplayNativeHdrCreateInfoAMD & + setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT { localDimmingEnable = localDimmingEnable_; return *this; } - operator VkSwapchainDisplayNativeHdrCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default; #else - bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( localDimmingEnable == rhs.localDimmingEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingEnable == rhs.localDimmingEnable ); } - bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -65210,21 +72540,27 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {}; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {}; }; - static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct TextureLODGatherFormatPropertiesAMD { - VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( + VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {} ) VULKAN_HPP_NOEXCEPT : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ ) {} - TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + TextureLODGatherFormatPropertiesAMD & + operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( TextureLODGatherFormatPropertiesAMD ) - offsetof( TextureLODGatherFormatPropertiesAMD, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( TextureLODGatherFormatPropertiesAMD ) - offsetof( TextureLODGatherFormatPropertiesAMD, pNext ) ); return *this; } @@ -65233,33 +72569,33 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - TextureLODGatherFormatPropertiesAMD& operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + TextureLODGatherFormatPropertiesAMD & + operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkTextureLODGatherFormatPropertiesAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( TextureLODGatherFormatPropertiesAMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default; #else - bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD ); } - bool operator!=( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -65267,18 +72603,21 @@ namespace VULKAN_HPP_NAMESPACE public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {}; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {}; }; - static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct TimelineSemaphoreSubmitInfo { - VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {}, - const uint64_t* pWaitSemaphoreValues_ = {}, - uint32_t signalSemaphoreValueCount_ = {}, - const uint64_t* pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValueCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT : waitSemaphoreValueCount( waitSemaphoreValueCount_ ) , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) , signalSemaphoreValueCount( signalSemaphoreValueCount_ ) @@ -65287,7 +72626,8 @@ namespace VULKAN_HPP_NAMESPACE TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( TimelineSemaphoreSubmitInfo ) - offsetof( TimelineSemaphoreSubmitInfo, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( TimelineSemaphoreSubmitInfo ) - offsetof( TimelineSemaphoreSubmitInfo, pNext ) ); return *this; } @@ -65296,13 +72636,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - TimelineSemaphoreSubmitInfo& operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - TimelineSemaphoreSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -65314,80 +72654,80 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { pWaitSemaphoreValues = pWaitSemaphoreValues_; return *this; } - 
TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT + TimelineSemaphoreSubmitInfo & + setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT { signalSemaphoreValueCount = signalSemaphoreValueCount_; return *this; } - TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + TimelineSemaphoreSubmitInfo & + setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { pSignalSemaphoreValues = pSignalSemaphoreValues_; return *this; } - operator VkTimelineSemaphoreSubmitInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( TimelineSemaphoreSubmitInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default; #else - bool operator==( TimelineSemaphoreSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) - && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) - && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) - && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) && + ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && + ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) && + ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); } - bool operator!=( TimelineSemaphoreSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo; - const void* pNext = {}; - uint32_t waitSemaphoreValueCount = {}; - const uint64_t* pWaitSemaphoreValues = {}; - uint32_t signalSemaphoreValueCount = {}; - const uint64_t* pSignalSemaphoreValues = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreValueCount = {}; + const uint64_t * pWaitSemaphoreValues = {}; + uint32_t signalSemaphoreValueCount = {}; + const uint64_t * pSignalSemaphoreValues = {}; }; - static_assert( sizeof( TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #ifdef VK_ENABLE_BETA_EXTENSIONS struct TraceRaysIndirectCommandKHR { - VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, uint32_t height_ = {}, - uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT + uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT : width( width_ ) , height( height_ ) , depth( depth_ ) {} - explicit TraceRaysIndirectCommandKHR( Extent2D const& extent2D, - uint32_t depth_ = {} ) - : width( extent2D.width ) - , height( extent2D.height ) - , depth( depth_ ) + explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} ) + : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) {} TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT @@ -65395,9 +72735,9 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - TraceRaysIndirectCommandKHR& operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT + TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -65419,46 +72759,46 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkTraceRaysIndirectCommandKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( TraceRaysIndirectCommandKHR const& ) const = default; -#else - bool operator==( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default; +# else + bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( width == rhs.width ) - && ( height == rhs.height ) - && ( depth == rhs.depth ); + return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth ); } - bool operator!=( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - uint32_t width = {}; + uint32_t width = {}; uint32_t height = {}; - uint32_t depth = {}; + uint32_t depth = {}; }; - static_assert( sizeof( TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct ValidationCacheCreateInfoEXT { VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, - size_t initialDataSize_ = {}, - const void* pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT + size_t initialDataSize_ = {}, + const void * pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , initialDataSize( initialDataSize_ ) , pInitialData( pInitialData_ ) @@ -65466,7 +72806,8 @@ namespace VULKAN_HPP_NAMESPACE ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ValidationCacheCreateInfoEXT ) - offsetof( ValidationCacheCreateInfoEXT, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( ValidationCacheCreateInfoEXT ) - offsetof( ValidationCacheCreateInfoEXT, pNext ) ); return *this; } @@ -65475,19 +72816,20 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + ValidationCacheCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -65499,56 +72841,56 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ValidationCacheCreateInfoEXT & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT + ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT { pInitialData = pInitialData_; return *this; } - operator VkValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ValidationCacheCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationCacheCreateInfoEXT const & ) const = default; #else - bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( initialDataSize == rhs.initialDataSize ) - && ( pInitialData == rhs.pInitialData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( initialDataSize == rhs.initialDataSize ) && ( pInitialData == rhs.pInitialData ); } - bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT; - const void* 
pNext = {}; - VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {}; - size_t initialDataSize = {}; - const void* pInitialData = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {}; + size_t initialDataSize = {}; + const void * pInitialData = {}; }; - static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ValidationFeaturesEXT { - VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = {}, - const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = {}, - uint32_t disabledValidationFeatureCount_ = {}, - const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( + uint32_t enabledValidationFeatureCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {}, + uint32_t disabledValidationFeatureCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {} ) VULKAN_HPP_NOEXCEPT : enabledValidationFeatureCount( enabledValidationFeatureCount_ ) , pEnabledValidationFeatures( pEnabledValidationFeatures_ ) , disabledValidationFeatureCount( disabledValidationFeatureCount_ ) @@ -65566,86 +72908,91 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ValidationFeaturesEXT& operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ValidationFeaturesEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT + ValidationFeaturesEXT & + setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT { enabledValidationFeatureCount = enabledValidationFeatureCount_; return *this; } - ValidationFeaturesEXT & setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + ValidationFeaturesEXT & setPEnabledValidationFeatures( + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT { pEnabledValidationFeatures = pEnabledValidationFeatures_; return *this; } - ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT + ValidationFeaturesEXT & + setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT { disabledValidationFeatureCount = disabledValidationFeatureCount_; return *this; } - ValidationFeaturesEXT & setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* 
pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + ValidationFeaturesEXT & setPDisabledValidationFeatures( + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT { pDisabledValidationFeatures = pDisabledValidationFeatures_; return *this; } - operator VkValidationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ValidationFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationFeaturesEXT const & ) const = default; #else - bool operator==( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) - && ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) - && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) - && ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) && + ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) && + ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) && + ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures ); } - bool operator!=( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT; - const void* pNext = {}; - uint32_t enabledValidationFeatureCount = {}; - const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures = {}; - uint32_t disabledValidationFeatureCount = {}; - const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT; + const void * pNext = {}; + uint32_t enabledValidationFeatureCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures = {}; + uint32_t disabledValidationFeatureCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {}; }; - static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ValidationFlagsEXT { - VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = {}, - const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( + uint32_t disabledValidationCheckCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {} ) VULKAN_HPP_NOEXCEPT : disabledValidationCheckCount( disabledValidationCheckCount_ ) , pDisabledValidationChecks( pDisabledValidationChecks_ ) {} @@ -65661,13 +73008,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ValidationFlagsEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -65679,53 +73026,54 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ValidationFlagsEXT & setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT + ValidationFlagsEXT & setPDisabledValidationChecks( + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT { pDisabledValidationChecks = pDisabledValidationChecks_; return *this; } - operator VkValidationFlagsEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ValidationFlagsEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationFlagsEXT const & ) const = default; #else - bool operator==( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) - && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) && + ( pDisabledValidationChecks == rhs.pDisabledValidationChecks ); } - bool operator!=( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT; - const void* pNext = {}; - uint32_t disabledValidationCheckCount = {}; - const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT; + const void * pNext = {}; + uint32_t disabledValidationCheckCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks = {}; }; - static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" 
); + static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_VI_NN struct ViSurfaceCreateInfoNN { VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, - void* window_ = {} ) VULKAN_HPP_NOEXCEPT + void * window_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , window( window_ ) {} @@ -65741,13 +73089,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ViSurfaceCreateInfoNN & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ViSurfaceCreateInfoNN & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -65759,55 +73107,53 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ViSurfaceCreateInfoNN & setWindow( void* window_ ) VULKAN_HPP_NOEXCEPT + ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT { window = window_; return *this; } - operator VkViSurfaceCreateInfoNN const&() const VULKAN_HPP_NOEXCEPT + operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ViSurfaceCreateInfoNN const& ) const = default; -#else - bool operator==( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default; +# else + bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( window == rhs.window ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); } - bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {}; - void* window = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {}; + void * window = {}; }; - static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" ); + static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #endif /*VK_USE_PLATFORM_VI_NN*/ #ifdef VK_USE_PLATFORM_WAYLAND_KHR struct WaylandSurfaceCreateInfoKHR { - VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, - struct wl_display* display_ = {}, - struct wl_surface* surface_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, + struct wl_display * display_ = {}, + struct wl_surface * surface_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , display( display_ ) , surface( surface_ ) @@ -65815,7 +73161,8 @@ namespace VULKAN_HPP_NAMESPACE WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( WaylandSurfaceCreateInfoKHR ) - offsetof( WaylandSurfaceCreateInfoKHR, pNext ) ); + memcpy( + &pNext, &rhs.pNext, sizeof( WaylandSurfaceCreateInfoKHR ) - offsetof( WaylandSurfaceCreateInfoKHR, pNext ) ); return *this; } @@ -65824,85 +73171,86 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - WaylandSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + WaylandSurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display* display_ ) VULKAN_HPP_NOEXCEPT + WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display * display_ ) VULKAN_HPP_NOEXCEPT { display = display_; return *this; } - WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface* surface_ ) VULKAN_HPP_NOEXCEPT + WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface * surface_ ) VULKAN_HPP_NOEXCEPT { surface = surface_; return *this; } - operator VkWaylandSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( WaylandSurfaceCreateInfoKHR const& ) const = default; -#else - bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WaylandSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( display == rhs.display ) - && ( surface == rhs.surface ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( display == rhs.display ) && + ( surface == rhs.surface ); } - bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( 
WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {}; - struct wl_display* display = {}; - struct wl_surface* surface = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {}; + struct wl_display * display = {}; + struct wl_surface * surface = {}; }; - static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR struct Win32KeyedMutexAcquireReleaseInfoKHR { - VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {}, - const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, - const uint64_t* pAcquireKeys_ = {}, - const uint32_t* pAcquireTimeouts_ = {}, - uint32_t releaseCount_ = {}, - const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {}, - const uint64_t* pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, + const uint64_t * pAcquireKeys_ = {}, + const uint32_t * pAcquireTimeouts_ = {}, + uint32_t releaseCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, + const uint64_t * pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT : acquireCount( acquireCount_ ) , pAcquireSyncs( pAcquireSyncs_ ) , pAcquireKeys( pAcquireKeys_ ) @@ -65912,9 +73260,13 @@ namespace VULKAN_HPP_NAMESPACE , pReleaseKeys( pReleaseKeys_ ) {} - Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoKHR & + operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) - offsetof( Win32KeyedMutexAcquireReleaseInfoKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) - + offsetof( Win32KeyedMutexAcquireReleaseInfoKHR, pNext ) ); return *this; } @@ -65923,13 +73275,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoKHR & + operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -65941,19 +73294,20 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const 
VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoKHR & + setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT { pAcquireSyncs = pAcquireSyncs_; return *this; } - Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT { pAcquireKeys = pAcquireKeys_; return *this; } - Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT { pAcquireTimeouts = pAcquireTimeouts_; return *this; @@ -65965,75 +73319,74 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoKHR & + setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT { pReleaseSyncs = pReleaseSyncs_; return *this; } - Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT { pReleaseKeys = pReleaseKeys_; return *this; } - operator VkWin32KeyedMutexAcquireReleaseInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const& ) const = default; -#else - bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const & ) const = default; +# else + bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( acquireCount == rhs.acquireCount ) - && ( pAcquireSyncs == rhs.pAcquireSyncs ) - && ( pAcquireKeys == rhs.pAcquireKeys ) - && ( pAcquireTimeouts == rhs.pAcquireTimeouts ) - && ( releaseCount == rhs.releaseCount ) - && ( pReleaseSyncs == rhs.pReleaseSyncs ) - && ( pReleaseKeys == rhs.pReleaseKeys ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && + ( pAcquireSyncs == rhs.pAcquireSyncs ) && ( pAcquireKeys == rhs.pAcquireKeys ) && + ( pAcquireTimeouts == rhs.pAcquireTimeouts ) && ( releaseCount == rhs.releaseCount ) && + ( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys ); } - bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; - const void* pNext = {}; - uint32_t acquireCount = {}; - const VULKAN_HPP_NAMESPACE::DeviceMemory* 
pAcquireSyncs = {}; - const uint64_t* pAcquireKeys = {}; - const uint32_t* pAcquireTimeouts = {}; - uint32_t releaseCount = {}; - const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {}; - const uint64_t* pReleaseKeys = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; + const void * pNext = {}; + uint32_t acquireCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {}; + const uint64_t * pAcquireKeys = {}; + const uint32_t * pAcquireTimeouts = {}; + uint32_t releaseCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {}; + const uint64_t * pReleaseKeys = {}; }; - static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR struct Win32KeyedMutexAcquireReleaseInfoNV { - VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {}, - const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, - const uint64_t* pAcquireKeys_ = {}, - const uint32_t* pAcquireTimeoutMilliseconds_ = {}, - uint32_t releaseCount_ = {}, - const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {}, - const uint64_t* pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, + const uint64_t * pAcquireKeys_ = {}, + const uint32_t * pAcquireTimeoutMilliseconds_ = {}, + uint32_t releaseCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, + const uint64_t * pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT : acquireCount( acquireCount_ ) , pAcquireSyncs( pAcquireSyncs_ ) , pAcquireKeys( pAcquireKeys_ ) @@ -66043,9 +73396,12 @@ namespace VULKAN_HPP_NAMESPACE , pReleaseKeys( pReleaseKeys_ ) {} - Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoNV & + operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) - offsetof( Win32KeyedMutexAcquireReleaseInfoNV, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) - offsetof( Win32KeyedMutexAcquireReleaseInfoNV, pNext ) ); return *this; } @@ -66054,13 +73410,14 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoNV & + operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -66072,19 +73429,21 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - 
Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoNV & + setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT { pAcquireSyncs = pAcquireSyncs_; return *this; } - Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT { pAcquireKeys = pAcquireKeys_; return *this; } - Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoNV & + setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT { pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_; return *this; @@ -66096,70 +73455,69 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoNV & + setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT { pReleaseSyncs = pReleaseSyncs_; return *this; } - Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT + Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT { pReleaseKeys = pReleaseKeys_; return *this; } - operator VkWin32KeyedMutexAcquireReleaseInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const& ) const = default; -#else - bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const & ) const = default; +# else + bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( acquireCount == rhs.acquireCount ) - && ( pAcquireSyncs == rhs.pAcquireSyncs ) - && ( pAcquireKeys == rhs.pAcquireKeys ) - && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) - && ( releaseCount == rhs.releaseCount ) - && ( pReleaseSyncs == rhs.pReleaseSyncs ) - && ( pReleaseKeys == rhs.pReleaseKeys ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && + ( pAcquireSyncs == rhs.pAcquireSyncs ) && ( pAcquireKeys == rhs.pAcquireKeys ) && + ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) && + ( releaseCount == rhs.releaseCount ) && ( pReleaseSyncs == rhs.pReleaseSyncs ) && + ( pReleaseKeys == rhs.pReleaseKeys ); } - bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; - const void* pNext = {}; - uint32_t acquireCount = {}; - const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs = {}; - const uint64_t* pAcquireKeys = {}; - const uint32_t* pAcquireTimeoutMilliseconds = {}; - uint32_t releaseCount = {}; - const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {}; - const uint64_t* pReleaseKeys = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; + const void * pNext = {}; + uint32_t acquireCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {}; + const uint64_t * pAcquireKeys = {}; + const uint32_t * pAcquireTimeoutMilliseconds = {}; + uint32_t releaseCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {}; + const uint64_t * pReleaseKeys = {}; }; - static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR struct Win32SurfaceCreateInfoKHR { - VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, - HINSTANCE hinstance_ = {}, + VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, + HINSTANCE hinstance_ = {}, HWND hwnd_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , hinstance( hinstance_ ) @@ -66177,13 +73535,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - Win32SurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + Win32SurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -66207,55 +73565,55 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkWin32SurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Win32SurfaceCreateInfoKHR const& ) const = default; -#else - bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Win32SurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( hinstance == rhs.hinstance ) - && ( hwnd == rhs.hwnd ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( hinstance == rhs.hinstance ) && ( hwnd == rhs.hwnd ); } 
- bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {}; - HINSTANCE hinstance = {}; - HWND hwnd = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {}; + HINSTANCE hinstance = {}; + HWND hwnd = {}; }; - static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct WriteDescriptorSet { - VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, - uint32_t dstBinding_ = {}, - uint32_t dstArrayElement_ = {}, - uint32_t descriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {}, - const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {}, + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {} ) VULKAN_HPP_NOEXCEPT : dstSet( dstSet_ ) , dstBinding( dstBinding_ ) , dstArrayElement( dstArrayElement_ ) @@ -66277,13 +73635,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -66319,137 +73677,144 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSet & + setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT { pImageInfo = pImageInfo_; return *this; } - WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSet & + setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT { pBufferInfo = pBufferInfo_; return *this; } - 
WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSet & + setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT { pTexelBufferView = pTexelBufferView_; return *this; } - operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT + operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( WriteDescriptorSet const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSet const & ) const = default; #else - bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( dstSet == rhs.dstSet ) - && ( dstBinding == rhs.dstBinding ) - && ( dstArrayElement == rhs.dstArrayElement ) - && ( descriptorCount == rhs.descriptorCount ) - && ( descriptorType == rhs.descriptorType ) - && ( pImageInfo == rhs.pImageInfo ) - && ( pBufferInfo == rhs.pBufferInfo ) - && ( pTexelBufferView == rhs.pTexelBufferView ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) && + ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && + ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && + ( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) && + ( pTexelBufferView == rhs.pTexelBufferView ); } - bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; - uint32_t dstBinding = {}; - uint32_t dstArrayElement = {}; - uint32_t descriptorCount = {}; - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {}; - const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {}; - const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {}; + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {}; }; - static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" ); + static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct WriteDescriptorSetAccelerationStructureKHR { - VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( uint32_t accelerationStructureCount_ = {}, - const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( + uint32_t accelerationStructureCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT : accelerationStructureCount( accelerationStructureCount_ ) , pAccelerationStructures( pAccelerationStructures_ ) {} - WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR & + operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( WriteDescriptorSetAccelerationStructureKHR ) - offsetof( WriteDescriptorSetAccelerationStructureKHR, pNext ) ); + memcpy( &pNext, + &rhs.pNext, + sizeof( WriteDescriptorSetAccelerationStructureKHR ) - + offsetof( WriteDescriptorSetAccelerationStructureKHR, pNext ) ); return *this; } - WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { *this = rhs; } - WriteDescriptorSetAccelerationStructureKHR& operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR & + operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - WriteDescriptorSetAccelerationStructureKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR & + setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT { accelerationStructureCount = accelerationStructureCount_; return *this; } - WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT { pAccelerationStructures = pAccelerationStructures_; return *this; } - operator VkWriteDescriptorSetAccelerationStructureKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const & ) const = default; #else - bool 
operator==( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( accelerationStructureCount == rhs.accelerationStructureCount )
-          && ( pAccelerationStructures == rhs.pAccelerationStructures );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
+             ( pAccelerationStructures == rhs.pAccelerationStructures );
     }

-    bool operator!=( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
@@ -66457,24 +73822,31 @@
   public:
     const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
-    const void* pNext = {};
-    uint32_t accelerationStructureCount = {};
-    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures = {};
+    const void * pNext = {};
+    uint32_t accelerationStructureCount = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures = {};
   };
-  static_assert( sizeof( WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( WriteDescriptorSetAccelerationStructureKHR ) ==
+                   sizeof( VkWriteDescriptorSetAccelerationStructureKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureKHR>::value,
+                 "struct wrapper is not a standard layout!" );

  struct WriteDescriptorSetInlineUniformBlockEXT
  {
-    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = {},
-                                                                  const void* pData_ = {} ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = {},
+                                                                  const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
       : dataSize( dataSize_ )
       , pData( pData_ )
     {}

-    WriteDescriptorSetInlineUniformBlockEXT & operator=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    WriteDescriptorSetInlineUniformBlockEXT &
+      operator=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( WriteDescriptorSetInlineUniformBlockEXT ) - offsetof( WriteDescriptorSetInlineUniformBlockEXT, pNext ) );
+      memcpy( &pNext,
+              &rhs.pNext,
+              sizeof( WriteDescriptorSetInlineUniformBlockEXT ) -
+                offsetof( WriteDescriptorSetInlineUniformBlockEXT, pNext ) );
       return *this;
     }
@@ -66483,13 +73855,14 @@
       *this = rhs;
     }

-    WriteDescriptorSetInlineUniformBlockEXT& operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    WriteDescriptorSetInlineUniformBlockEXT &
+      operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<WriteDescriptorSetInlineUniformBlockEXT const *>(&rhs);
+      *this = *reinterpret_cast<WriteDescriptorSetInlineUniformBlockEXT const *>( &rhs );
       return *this;
     }

-    WriteDescriptorSetInlineUniformBlockEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    WriteDescriptorSetInlineUniformBlockEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -66501,53 +73874,53 @@
       return *this;
     }

-    WriteDescriptorSetInlineUniformBlockEXT & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
+    WriteDescriptorSetInlineUniformBlockEXT & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
     {
       pData = pData_;
       return *this;
     }

-    operator VkWriteDescriptorSetInlineUniformBlockEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkWriteDescriptorSetInlineUniformBlockEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
+      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT *>( this );
     }

     operator VkWriteDescriptorSetInlineUniformBlockEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
+      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT *>( this );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( WriteDescriptorSetInlineUniformBlockEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( WriteDescriptorSetInlineUniformBlockEXT const & ) const = default;
 #else
-    bool operator==( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( dataSize == rhs.dataSize )
-          && ( pData == rhs.pData );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
     }

-    bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
-    const void* pNext = {};
-    uint32_t dataSize = {};
-    const void* pData = {};
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
+
const void * pNext = {}; + uint32_t dataSize = {}; + const void * pData = {}; }; - static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == + sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); #ifdef VK_USE_PLATFORM_XCB_KHR struct XcbSurfaceCreateInfoKHR { - VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, - xcb_connection_t* connection_ = {}, + VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, + xcb_connection_t * connection_ = {}, xcb_window_t window_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , connection( connection_ ) @@ -66565,13 +73938,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - XcbSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + XcbSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -66583,7 +73956,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t* connection_ ) VULKAN_HPP_NOEXCEPT + XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT { connection = connection_; return *this; @@ -66595,42 +73968,40 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkXcbSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( XcbSurfaceCreateInfoKHR const& ) const = default; -#else - bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( XcbSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( connection == rhs.connection ) - && ( window == rhs.window ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( connection == rhs.connection ) && ( window == rhs.window ); } - bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {}; - xcb_connection_t* connection = {}; - xcb_window_t window = {}; + const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {}; + xcb_connection_t * connection = {}; + xcb_window_t window = {}; }; - static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_XCB_KHR*/ @@ -66638,7 +74009,7 @@ namespace VULKAN_HPP_NAMESPACE struct XlibSurfaceCreateInfoKHR { VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, - Display* dpy_ = {}, + Display * dpy_ = {}, Window window_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , dpy( dpy_ ) @@ -66656,13 +74027,13 @@ namespace VULKAN_HPP_NAMESPACE *this = rhs; } - XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - XlibSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + XlibSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -66674,7 +74045,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - XlibSurfaceCreateInfoKHR & setDpy( Display* dpy_ ) VULKAN_HPP_NOEXCEPT + XlibSurfaceCreateInfoKHR & setDpy( Display * dpy_ ) VULKAN_HPP_NOEXCEPT { dpy = dpy_; return *this; @@ -66686,90 +74057,112 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - operator VkXlibSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( XlibSurfaceCreateInfoKHR const& ) const = default; -#else - bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( XlibSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( dpy == rhs.dpy ) - && ( window == rhs.window ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dpy == rhs.dpy ) && + ( window == rhs.window ); } - bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {}; - Display* dpy = {}; - Window window = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {}; + Display * dpy = 
{}; + Window window = {}; }; - static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_XLIB_KHR*/ - template - VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d) VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Instance * pInstance, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateInstance( reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pInstance ) ) ); + return static_cast( d.vkCreateInstance( reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pInstance ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) + template + VULKAN_HPP_INLINE typename ResultValueType::type createInstance( + const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) { VULKAN_HPP_NAMESPACE::Instance instance; - Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); - return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstance" ); + Result result = static_cast( d.vkCreateInstance( + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &instance ) ) ); + return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type createInstanceUnique( + const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) { VULKAN_HPP_NAMESPACE::Instance instance; - Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); + Result result = static_cast( d.vkCreateInstance( + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &instance ) ) ); - ObjectDestroy deleter( allocator, d ); - return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstanceUnique", deleter ); + ObjectDestroy deleter( allocator, d ); + return createResultValue( + result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* 
pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + enumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkEnumerateInstanceExtensionProperties( + pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Dispatch const &d ) + template + VULKAN_HPP_INLINE typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, Dispatch const & d ) { - std::vector properties; - uint32_t propertyCount; - Result result; + std::vector properties; + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -66777,21 +74170,29 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) + template + VULKAN_HPP_INLINE typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, + Allocator const & vectorAllocator, + Dispatch const & d ) { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? 
layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -66799,29 +74200,35 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( + d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d ) + template + VULKAN_HPP_INLINE typename ResultValueType>::type + enumerateInstanceLayerProperties( Dispatch const & d ) { - std::vector properties; - uint32_t propertyCount; - Result result; + std::vector properties; + uint32_t propertyCount; + Result result; do { result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkEnumerateInstanceLayerProperties( + &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -66829,21 +74236,23 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) + template + VULKAN_HPP_INLINE typename ResultValueType>::type + enumerateInstanceLayerProperties( Allocator const & vectorAllocator, Dispatch const & d ) { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; do { result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkEnumerateInstanceLayerProperties( + &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -66851,4947 
+74260,8717 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d) VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT { return static_cast( d.vkEnumerateInstanceVersion( pApiVersion ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type enumerateInstanceVersion(Dispatch const &d ) + template + VULKAN_HPP_INLINE typename ResultValueType::type enumerateInstanceVersion( Dispatch const & d ) { uint32_t apiVersion; - Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); - return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceVersion" ); + Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); + return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); + return static_cast( + d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, + Dispatch const & d ) const { - Result result = static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" ); + Result result = static_cast( + d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( pConditionalRenderingBegin ) ); + d.vkCmdBeginConditionalRenderingEXT( + m_commandBuffer, reinterpret_cast( pConditionalRenderingBegin ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void 
CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( &conditionalRenderingBegin ) ); + d.vkCmdBeginConditionalRenderingEXT( + m_commandBuffer, reinterpret_cast( &conditionalRenderingBegin ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); + d.vkCmdBeginQuery( + m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); + d.vkCmdBeginQuery( + m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + 
VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); + d.vkCmdBeginQueryIndexedEXT( + m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); + d.vkCmdBeginQueryIndexedEXT( + m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), static_cast( contents ) ); + d.vkCmdBeginRenderPass( m_commandBuffer, + reinterpret_cast( pRenderPassBegin ), + static_cast( contents ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( &renderPassBegin ), static_cast( contents ) ); + d.vkCmdBeginRenderPass( m_commandBuffer, + reinterpret_cast( &renderPassBegin ), + static_cast( contents ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); + d.vkCmdBeginRenderPass2( m_commandBuffer, + reinterpret_cast( pRenderPassBegin ), + reinterpret_cast( pSubpassBeginInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + 
VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); + d.vkCmdBeginRenderPass2( m_commandBuffer, + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, + reinterpret_cast( pRenderPassBegin ), + reinterpret_cast( pSubpassBeginInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast( pCounterBuffers ), reinterpret_cast( pCounterBufferOffsets ) ); + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBufferCount, + reinterpret_cast( pCounterBuffers ), + reinterpret_cast( pCounterBufferOffsets ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void + CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy counterBuffers, + ArrayProxy counterBufferOffsets, + Dispatch const & d ) const { -#ifdef VULKAN_HPP_NO_EXCEPTIONS +# ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( counterBuffers.size() == 
counterBufferOffsets.size() ); -#else +# else if ( counterBuffers.size() != counterBufferOffsets.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::VkCommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast( counterBuffers.data() ), reinterpret_cast( counterBufferOffsets.data() ) ); +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSetCount, reinterpret_cast( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets ); + d.vkCmdBindDescriptorSets( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + descriptorSetCount, + reinterpret_cast( pDescriptorSets ), + dynamicOffsetCount, + pDynamicOffsets ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + ArrayProxy descriptorSets, + ArrayProxy dynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSets.size() , reinterpret_cast( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() ); + d.vkCmdBindDescriptorSets( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ), + dynamicOffsets.size(), + dynamicOffsets.data() ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, 
Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( indexType ) ); + d.vkCmdBindIndexBuffer( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( indexType ) ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( indexType ) ); + d.vkCmdBindIndexBuffer( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( indexType ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + d.vkCmdBindPipeline( + m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + d.vkCmdBindPipeline( + m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); + d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( pipeline ), + groupIndex ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( 
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); + d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( pipeline ), + groupIndex ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); + d.vkCmdBindShadingRateImageNV( + m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); + d.vkCmdBindShadingRateImageNV( + m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ), reinterpret_cast( pSizes ) ); + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ), + reinterpret_cast( pSizes ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, ArrayProxy sizes, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void + CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + ArrayProxy buffers, + ArrayProxy offsets, + ArrayProxy sizes, + Dispatch const & d 
) const { -#ifdef VULKAN_HPP_NO_EXCEPTIONS +# ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); -#else +# else if ( buffers.size() != offsets.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +# ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( buffers.size() == sizes.size() ); -#else +# else if ( buffers.size() != sizes.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +# ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( offsets.size() == sizes.size() ); -#else +# else if ( offsets.size() != sizes.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: offsets.size() != sizes.size()" ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: offsets.size() != sizes.size()" ); } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ), reinterpret_cast( sizes.data() ) ); +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ) ); + d.vkCmdBindVertexBuffers( m_commandBuffer, + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + ArrayProxy buffers, + ArrayProxy offsets, + Dispatch const & d ) const { -#ifdef VULKAN_HPP_NO_EXCEPTIONS +# ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); -#else +# else if ( buffers.size() != offsets.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + throw LogicError( 
VULKAN_HPP_NAMESPACE_STRING + "::VkCommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ) ); +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkCmdBindVertexBuffers( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ), static_cast( filter ) ); + d.vkCmdBlitImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ), + static_cast( filter ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy regions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ), static_cast( filter ) ); + d.vkCmdBlitImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ), + static_cast( filter ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( + const 
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfo, + VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, + uint32_t indirectStride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( indirectBuffer ), static_cast( indirectOffset ), indirectStride ); + d.vkCmdBuildAccelerationStructureIndirectKHR( + m_commandBuffer, + reinterpret_cast( pInfo ), + static_cast( indirectBuffer ), + static_cast( indirectOffset ), + indirectStride ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, + VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, + uint32_t indirectStride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast( &info ), static_cast( indirectBuffer ), static_cast( indirectOffset ), indirectStride ); + d.vkCmdBuildAccelerationStructureIndirectKHR( + m_commandBuffer, + reinterpret_cast( &info ), + static_cast( indirectBuffer ), + static_cast( indirectOffset ), + indirectStride ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR * const * ppOffsetInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infoCount, reinterpret_cast( pInfos ), reinterpret_cast( ppOffsetInfos ) ); + d.vkCmdBuildAccelerationStructureKHR( + m_commandBuffer, + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( ppOffsetInfos ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( ArrayProxy infos, ArrayProxy pOffsetInfos, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( + ArrayProxy infos, + ArrayProxy pOffsetInfos, + Dispatch const & d ) const { -#ifdef VULKAN_HPP_NO_EXCEPTIONS +# ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( infos.size() == pOffsetInfos.size() ); -#else +# else if ( infos.size() != pOffsetInfos.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" ); + throw LogicError( 
VULKAN_HPP_NAMESPACE_STRING + "::VkCommandBuffer::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" ); } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infos.size() , reinterpret_cast( infos.data() ), reinterpret_cast( pOffsetInfos.data() ) ); +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkCmdBuildAccelerationStructureKHR( + m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pOffsetInfos.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast( pInfo ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( &info ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); 
+ d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast( &info ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, + uint32_t rectCount, + const VULKAN_HPP_NAMESPACE::ClearRect * pRects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast( pAttachments ), rectCount, reinterpret_cast( pRects ) ); + d.vkCmdClearAttachments( m_commandBuffer, + attachmentCount, + reinterpret_cast( pAttachments ), + rectCount, + reinterpret_cast( pRects ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::clearAttachments( ArrayProxy attachments, + ArrayProxy rects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast( attachments.data() ), rects.size() , reinterpret_cast( rects.data() ) ); + d.vkCmdClearAttachments( m_commandBuffer, + attachments.size(), + reinterpret_cast( attachments.data() ), + rects.size(), + reinterpret_cast( rects.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pColor ), rangeCount, reinterpret_cast( pRanges ) ); + d.vkCmdClearColorImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( pColor ), + rangeCount, + reinterpret_cast( pRanges ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const ClearColorValue & color, + ArrayProxy ranges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( 
&color ), ranges.size() , reinterpret_cast( ranges.data() ) ); + d.vkCmdClearColorImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &color ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pDepthStencil ), rangeCount, reinterpret_cast( pRanges ) ); + d.vkCmdClearDepthStencilImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( pDepthStencil ), + rangeCount, + reinterpret_cast( pRanges ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const ClearDepthStencilValue & depthStencil, + ArrayProxy ranges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &depthStencil ), ranges.size() , reinterpret_cast( ranges.data() ) ); + d.vkCmdClearDepthStencilImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &depthStencil ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, + reinterpret_cast( pInfo ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + 
d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, + reinterpret_cast( &info ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, + static_cast( dst ), + static_cast( src ), + static_cast( mode ) ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, + static_cast( dst ), + static_cast( src ), + static_cast( mode ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + d.vkCmdCopyAccelerationStructureToMemoryKHR( + m_commandBuffer, reinterpret_cast( pInfo ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( &info ) ); + d.vkCmdCopyAccelerationStructureToMemoryKHR( + m_commandBuffer, reinterpret_cast( &info ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_INLINE void 
CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstBuffer ), regionCount, reinterpret_cast( pRegions ) ); + d.vkCmdCopyBuffer( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstBuffer ), + regionCount, + reinterpret_cast( pRegions ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); + d.vkCmdCopyBuffer( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); + d.vkCmdCopyBufferToImage( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); + d.vkCmdCopyBufferToImage( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); 
} #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); + d.vkCmdCopyImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); + d.vkCmdCopyImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), regionCount, reinterpret_cast( pRegions ) ); + d.vkCmdCopyImageToBuffer( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstBuffer ), + regionCount, + reinterpret_cast( pRegions ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 
ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); + d.vkCmdCopyImageToBuffer( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + d.vkCmdCopyMemoryToAccelerationStructureKHR( + m_commandBuffer, reinterpret_cast( pInfo ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + d.vkCmdCopyMemoryToAccelerationStructureKHR( + m_commandBuffer, reinterpret_cast( &info ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( stride ), static_cast( flags ) ); + d.vkCmdCopyQueryPoolResults( m_commandBuffer, + static_cast( queryPool ), + firstQuery, + queryCount, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( stride ), + static_cast( flags ) ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( 
VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( stride ), static_cast( flags ) ); + d.vkCmdCopyQueryPoolResults( m_commandBuffer, + static_cast( queryPool ), + firstQuery, + queryCount, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( stride ), + static_cast( flags ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void 
CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDispatchBaseKHR( + m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ 
);
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX,
+                                                         uint32_t baseGroupY,
+                                                         uint32_t baseGroupZ,
+                                                         uint32_t groupCountX,
+                                                         uint32_t groupCountY,
+                                                         uint32_t groupCountZ,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    d.vkCmdDispatchBaseKHR(
+      m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                          VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                          VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount,
+                                              uint32_t instanceCount,
+                                              uint32_t firstVertex,
+                                              uint32_t firstInstance,
+                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount,
+                                              uint32_t instanceCount,
+                                              uint32_t firstVertex,
+                                              uint32_t firstInstance,
+                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount,
+                                                     uint32_t instanceCount,
+                                                     uint32_t firstIndex,
+                                                     int32_t  vertexOffset,
+                                                     uint32_t firstInstance,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount,
+                                                     uint32_t instanceCount,
+                                                     uint32_t firstIndex,
+                                                     int32_t  vertexOffset,
+                                                     uint32_t firstInstance,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                             VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                             uint32_t drawCount,
+                                                             uint32_t stride,
+                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+    d.vkCmdDrawIndexedIndirect(
+      m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                             VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                             uint32_t drawCount,
+                                                             uint32_t stride,
+                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+    d.vkCmdDrawIndexedIndirect(
+      m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                  VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+                                                                  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                                  uint32_t maxDrawCount,
+                                                                  uint32_t stride,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+    d.vkCmdDrawIndexedIndirectCount( m_commandBuffer,
+                                     static_cast<VkBuffer>( buffer ),
+                                     static_cast<VkDeviceSize>( offset ),
+                                     static_cast<VkBuffer>( countBuffer ),
+                                     static_cast<VkDeviceSize>( countBufferOffset ),
+                                     maxDrawCount,
+                                     stride );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+
template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize 
offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + d.vkCmdDrawIndirect( + m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + d.vkCmdDrawIndirect( + m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE 
void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast( counterBuffer ), static_cast( counterBufferOffset ), counterOffset, vertexStride ); + d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, + instanceCount, + firstInstance, + static_cast( counterBuffer ), + static_cast( counterBufferOffset ), + counterOffset, + vertexStride ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast( counterBuffer ), static_cast( counterBufferOffset ), counterOffset, vertexStride ); + d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, + instanceCount, + firstInstance, + static_cast( counterBuffer ), + static_cast( counterBufferOffset ), + counterOffset, + vertexStride ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawIndirectCount( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast( 
buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawIndirectCount( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawIndirectCountAMD( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawIndirectCountAMD( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawIndirectCountKHR( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( 
countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawIndirectCountKHR( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); + d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void 
CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + d.vkCmdDrawMeshTasksIndirectNV( + m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + d.vkCmdDrawMeshTasksIndirectNV( + m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, + uint32_t firstTask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, + uint32_t firstTask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + uint32_t index, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, index ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + uint32_t index, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, index ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndRenderPass( m_commandBuffer ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndRenderPass( m_commandBuffer ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); + d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); + d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast( pCounterBuffers ), reinterpret_cast( pCounterBufferOffsets ) ); + d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBufferCount, + reinterpret_cast( pCounterBuffers ), + reinterpret_cast( pCounterBufferOffsets ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void + CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy counterBuffers, + ArrayProxy counterBufferOffsets, + Dispatch const & d ) const { -#ifdef VULKAN_HPP_NO_EXCEPTIONS +# ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() ); -#else +# else if ( counterBuffers.size() != counterBufferOffsets.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::VkCommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast( counterBuffers.data() ), reinterpret_cast( counterBufferOffsets.data() ) ); +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + 
d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + d.vkCmdExecuteCommands( + m_commandBuffer, commandBufferCount, reinterpret_cast( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::executeCommands( ArrayProxy commandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast( commandBuffers.data() ) ); + d.vkCmdExecuteCommands( + m_commandBuffer, commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( + VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( pGeneratedCommandsInfo ) ); + d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, + static_cast( isPreprocessed ), + reinterpret_cast( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( &generatedCommandsInfo ) ); + d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, + static_cast( isPreprocessed ), + reinterpret_cast( &generatedCommandsInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data, + Dispatch const 
& d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdFillBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( size ), data ); + d.vkCmdFillBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( size ), + data ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdFillBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( size ), data ); + d.vkCmdFillBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( size ), + data ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { - d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); + d.vkCmdNextSubpass2( m_commandBuffer, + reinterpret_cast( pSubpassBeginInfo ), + reinterpret_cast( pSubpassEndInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast( &subpassBeginInfo ), reinterpret_cast( &subpassEndInfo ) ); + d.vkCmdNextSubpass2( m_commandBuffer, + reinterpret_cast( &subpassBeginInfo ), + reinterpret_cast( &subpassEndInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); + d.vkCmdNextSubpass2KHR( m_commandBuffer, + reinterpret_cast( pSubpassBeginInfo ), + reinterpret_cast( pSubpassEndInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast( &subpassBeginInfo ), reinterpret_cast( &subpassEndInfo ) ); + d.vkCmdNextSubpass2KHR( m_commandBuffer, + reinterpret_cast( &subpassBeginInfo ), + reinterpret_cast( &subpassEndInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const 
VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarrierCount, reinterpret_cast( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast( pImageMemoryBarriers ) ); + d.vkCmdPipelineBarrier( m_commandBuffer, + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarrierCount, + reinterpret_cast( pMemoryBarriers ), + bufferMemoryBarrierCount, + reinterpret_cast( pBufferMemoryBarriers ), + imageMemoryBarrierCount, + reinterpret_cast( pImageMemoryBarriers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + ArrayProxy memoryBarriers, + ArrayProxy bufferMemoryBarriers, + ArrayProxy imageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarriers.size() , reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast( imageMemoryBarriers.data() ) ); + d.vkCmdPipelineBarrier( m_commandBuffer, + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast( pGeneratedCommandsInfo ) ); + d.vkCmdPreprocessGeneratedCommandsNV( + m_commandBuffer, reinterpret_cast( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ) ); + d.vkCmdPreprocessGeneratedCommandsNV( + 
m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, size, pValues ); + d.vkCmdPushConstants( m_commandBuffer, + static_cast( layout ), + static_cast( stageFlags ), + offset, + size, + pValues ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy values, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + ArrayProxy values, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast( values.data() ) ); + d.vkCmdPushConstants( m_commandBuffer, + static_cast( layout ), + static_cast( stageFlags ), + offset, + values.size() * sizeof( T ), + reinterpret_cast( values.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, descriptorWriteCount, reinterpret_cast( pDescriptorWrites ) ); + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy descriptorWrites, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + ArrayProxy descriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, 
descriptorWrites.size() , reinterpret_cast( descriptorWrites.data() ) ); + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast( descriptorUpdateTemplate ), static_cast( layout ), set, pData ); + d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + pData ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast( descriptorUpdateTemplate ), static_cast( layout ), set, pData ); + d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + pData ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t 
firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); + d.vkCmdResolveImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); + d.vkCmdResolveImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d ) const 
VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast( sampleOrderType ), customSampleOrderCount, reinterpret_cast( pCustomSampleOrders ) ); + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast( sampleOrderType ), + customSampleOrderCount, + reinterpret_cast( pCustomSampleOrders ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy customSampleOrders, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + ArrayProxy customSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast( sampleOrderType ), customSampleOrders.size() , reinterpret_cast( customSampleOrders.data() ) ); + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast( sampleOrderType ), + customSampleOrders.size(), + reinterpret_cast( customSampleOrders.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d ) const 
VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, + float maxDepthBounds, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, + float maxDepthBounds, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, + firstDiscardRectangle, + discardRectangleCount, + reinterpret_cast( pDiscardRectangles ) ); } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy discardRectangles, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + ArrayProxy discardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast( discardRectangles.data() ) ); + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, + firstDiscardRectangle, + discardRectangles.size(), + reinterpret_cast( discardRectangles.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast( pExclusiveScissors ) ); + d.vkCmdSetExclusiveScissorNV( m_commandBuffer, + firstExclusiveScissor, + exclusiveScissorCount, + reinterpret_cast( pExclusiveScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy exclusiveScissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, + ArrayProxy exclusiveScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size() , reinterpret_cast( exclusiveScissors.data() ) ); + d.vkCmdSetExclusiveScissorNV( m_commandBuffer, + firstExclusiveScissor, + exclusiveScissors.size(), + reinterpret_cast( exclusiveScissors.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + 
template + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); + return static_cast( d.vkCmdSetPerformanceMarkerINTEL( + m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceMarkerINTEL" ); + Result result = static_cast( d.vkCmdSetPerformanceMarkerINTEL( + m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast( pOverrideInfo ) ) ); + return static_cast( 
d.vkCmdSetPerformanceOverrideINTEL( + m_commandBuffer, reinterpret_cast( pOverrideInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, + Dispatch const & d ) const { - Result result = static_cast( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast( &overrideInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceOverrideINTEL" ); + Result result = static_cast( d.vkCmdSetPerformanceOverrideINTEL( + m_commandBuffer, reinterpret_cast( &overrideInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); + return static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( + m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, + Dispatch const & d ) const { - Result result = static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceStreamMarkerINTEL" ); + Result result = static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( + m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( pSampleLocationsInfo ) ); + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, + reinterpret_cast( pSampleLocationsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & 
sampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( &sampleLocationsInfo ) ); + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, + reinterpret_cast( &sampleLocationsInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, + uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); + d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, + ArrayProxy scissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast( scissors.data() ) ); + d.vkCmdSetScissor( + m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast( scissors.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference 
); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); + d.vkCmdSetViewport( + m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, + ArrayProxy viewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast( viewports.data() ) ); + d.vkCmdSetViewport( + m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast( viewports.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pShadingRatePalettes ) ); + d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, + firstViewport, + viewportCount, + reinterpret_cast( pShadingRatePalettes ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy shadingRatePalettes, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + ArrayProxy shadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - 
d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size() , reinterpret_cast( shadingRatePalettes.data() ) ); + d.vkCmdSetViewportShadingRatePaletteNV( + m_commandBuffer, + firstViewport, + shadingRatePalettes.size(), + reinterpret_cast( shadingRatePalettes.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); + d.vkCmdSetViewportWScalingNV( m_commandBuffer, + firstViewport, + viewportCount, + reinterpret_cast( pViewportWScalings ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy viewportWScalings, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, + ArrayProxy viewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast( viewportWScalings.data() ) ); + d.vkCmdSetViewportWScalingNV( m_commandBuffer, + firstViewport, + viewportWScalings.size(), + reinterpret_cast( viewportWScalings.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pCallableShaderBindingTable, + VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast( pRaygenShaderBindingTable ), reinterpret_cast( pMissShaderBindingTable ), reinterpret_cast( pHitShaderBindingTable ), reinterpret_cast( pCallableShaderBindingTable ), static_cast( buffer ), static_cast( offset ) ); + d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, + reinterpret_cast( pRaygenShaderBindingTable ), + reinterpret_cast( pMissShaderBindingTable ), + reinterpret_cast( pHitShaderBindingTable ), + reinterpret_cast( pCallableShaderBindingTable ), + static_cast( buffer ), + static_cast( offset ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void 
CommandBuffer::traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, + const StridedBufferRegionKHR & missShaderBindingTable, + const StridedBufferRegionKHR & hitShaderBindingTable, + const StridedBufferRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast( &raygenShaderBindingTable ), reinterpret_cast( &missShaderBindingTable ), reinterpret_cast( &hitShaderBindingTable ), reinterpret_cast( &callableShaderBindingTable ), static_cast( buffer ), static_cast( offset ) ); + d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + static_cast( buffer ), + static_cast( offset ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast( pRaygenShaderBindingTable ), reinterpret_cast( pMissShaderBindingTable ), reinterpret_cast( pHitShaderBindingTable ), reinterpret_cast( pCallableShaderBindingTable ), width, height, depth ); + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast( pRaygenShaderBindingTable ), + reinterpret_cast( pMissShaderBindingTable ), + reinterpret_cast( pHitShaderBindingTable ), + reinterpret_cast( pCallableShaderBindingTable ), + width, + height, + depth ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# 
ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, + const StridedBufferRegionKHR & missShaderBindingTable, + const StridedBufferRegionKHR & hitShaderBindingTable, + const StridedBufferRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast( &raygenShaderBindingTable ), reinterpret_cast( &missShaderBindingTable ), reinterpret_cast( &hitShaderBindingTable ), reinterpret_cast( &callableShaderBindingTable ), width, height, depth ); + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + width, + height, + depth ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdTraceRaysNV( m_commandBuffer, static_cast( raygenShaderBindingTableBuffer ), static_cast( raygenShaderBindingOffset ), static_cast( missShaderBindingTableBuffer ), static_cast( missShaderBindingOffset ), static_cast( missShaderBindingStride ), static_cast( hitShaderBindingTableBuffer ), static_cast( hitShaderBindingOffset ), static_cast( hitShaderBindingStride ), static_cast( callableShaderBindingTableBuffer ), static_cast( callableShaderBindingOffset ), static_cast( callableShaderBindingStride ), width, height, depth ); + d.vkCmdTraceRaysNV( m_commandBuffer, + static_cast( raygenShaderBindingTableBuffer ), + static_cast( raygenShaderBindingOffset ), + static_cast( missShaderBindingTableBuffer ), + static_cast( 
missShaderBindingOffset ), + static_cast( missShaderBindingStride ), + static_cast( hitShaderBindingTableBuffer ), + static_cast( hitShaderBindingOffset ), + static_cast( hitShaderBindingStride ), + static_cast( callableShaderBindingTableBuffer ), + static_cast( callableShaderBindingOffset ), + static_cast( callableShaderBindingStride ), + width, + height, + depth ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdTraceRaysNV( m_commandBuffer, static_cast( raygenShaderBindingTableBuffer ), static_cast( raygenShaderBindingOffset ), static_cast( missShaderBindingTableBuffer ), static_cast( missShaderBindingOffset ), static_cast( missShaderBindingStride ), static_cast( hitShaderBindingTableBuffer ), static_cast( hitShaderBindingOffset ), static_cast( hitShaderBindingStride ), static_cast( callableShaderBindingTableBuffer ), static_cast( callableShaderBindingOffset ), static_cast( callableShaderBindingStride ), width, height, depth ); + d.vkCmdTraceRaysNV( m_commandBuffer, + static_cast( raygenShaderBindingTableBuffer ), + static_cast( raygenShaderBindingOffset ), + static_cast( missShaderBindingTableBuffer ), + static_cast( missShaderBindingOffset ), + static_cast( missShaderBindingStride ), + static_cast( hitShaderBindingTableBuffer ), + static_cast( hitShaderBindingOffset ), + static_cast( hitShaderBindingStride ), + static_cast( callableShaderBindingTableBuffer ), + static_cast( callableShaderBindingOffset ), + static_cast( callableShaderBindingStride ), + width, + height, + depth ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + 
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize dataSize, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( dataSize ), pData ); + d.vkCmdUpdateBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( dataSize ), + pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + ArrayProxy data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ); + d.vkCmdUpdateBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast( pEvents ), static_cast( srcStageMask ), static_cast( dstStageMask ), memoryBarrierCount, reinterpret_cast( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast( pImageMemoryBarriers ) ); + d.vkCmdWaitEvents( m_commandBuffer, + eventCount, + reinterpret_cast( pEvents ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarrierCount, + reinterpret_cast( pMemoryBarriers ), + bufferMemoryBarrierCount, + reinterpret_cast( pBufferMemoryBarriers ), + imageMemoryBarrierCount, + reinterpret_cast( pImageMemoryBarriers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + 
CommandBuffer::waitEvents( ArrayProxy events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + ArrayProxy memoryBarriers, + ArrayProxy bufferMemoryBarriers, + ArrayProxy imageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast( events.data() ), static_cast( srcStageMask ), static_cast( dstStageMask ), memoryBarriers.size() , reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast( imageMemoryBarriers.data() ) ); + d.vkCmdWaitEvents( m_commandBuffer, + events.size(), + reinterpret_cast( events.data() ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( + m_commandBuffer, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + ArrayProxy accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( + m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + d.vkCmdWriteAccelerationStructuresPropertiesNV( + m_commandBuffer, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + ArrayProxy accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + d.vkCmdWriteAccelerationStructuresPropertiesNV( + m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast( pipelineStage ), static_cast( dstBuffer ), static_cast( dstOffset ), marker ); + d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, + static_cast( pipelineStage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); } #else - template - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer,
+                                 static_cast<VkPipelineStageFlagBits>( pipelineStage ),
+                                 static_cast<VkBuffer>( dstBuffer ),
+                                 static_cast<VkDeviceSize>( dstOffset ),
+                                 marker );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
+                                                        VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                        uint32_t query,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+    d.vkCmdWriteTimestamp( m_commandBuffer,
+                           static_cast<VkPipelineStageFlagBits>( pipelineStage ),
+                           static_cast<VkQueryPool>( queryPool ),
+                           query );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
+                                                        VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                        uint32_t query,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+    d.vkCmdWriteTimestamp( m_commandBuffer,
+                           static_cast<VkPipelineStageFlagBits>( pipelineStage ),
+                           static_cast<VkQueryPool>( queryPool ),
+                           query );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
   {
     Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,
+                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+    return static_cast<Result>(
+      d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch
const & d ) const
   {
-    Result result = static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::reset" );
+    Result result =
+      static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VK_USE_PLATFORM_WIN32_KHR
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return static_cast<Result>(
+      d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
   }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+# else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
   {
-    Result result = static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireFullScreenExclusiveModeEXT" );
+    Result result = static_cast<Result>(
+      d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result
+    Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
+                                  uint32_t * pImageIndex,
+                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( pAcquireInfo ), pImageIndex ) );
+    return static_cast<Result>( d.vkAcquireNextImage2KHR(
+      m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo,
+                                                                        Dispatch const & d ) const
   {
     uint32_t imageIndex;
-    Result result = static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( &acquireInfo ), &imageIndex ) );
-    return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImage2KHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
+    Result result = static_cast<Result>( d.vkAcquireNextImage2KHR(
+      m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) );
+    return
createResultValue( result, + imageIndex, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", + { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + uint32_t * pImageIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkAcquireNextImageKHR( m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), pImageIndex ) ); + return static_cast( d.vkAcquireNextImageKHR( m_device, + static_cast( swapchain ), + timeout, + static_cast( semaphore ), + static_cast( fence ), + pImageIndex ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValue Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const { uint32_t imageIndex; - Result result = static_cast( d.vkAcquireNextImageKHR( m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), &imageIndex ) ); - return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } ); + Result result = static_cast( d.vkAcquireNextImageKHR( m_device, + static_cast( swapchain ), + timeout, + static_cast( semaphore ), + static_cast( fence ), + &imageIndex ) ); + return createResultValue( result, + imageIndex, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", + { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast( pAcquireInfo ), reinterpret_cast( pConfiguration ) ) ); + return static_cast( d.vkAcquirePerformanceConfigurationINTEL( + m_device, + reinterpret_cast( pAcquireInfo ), + reinterpret_cast( pConfiguration ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & 
acquireInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; - Result result = static_cast( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast( &acquireInfo ), reinterpret_cast( &configuration ) ) ); - return createResultValue( result, configuration, VULKAN_HPP_NAMESPACE_STRING"::Device::acquirePerformanceConfigurationINTEL" ); + Result result = static_cast( d.vkAcquirePerformanceConfigurationINTEL( + m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + return createResultValue( + result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( + const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( pInfo ) ) ); + return static_cast( + d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const { - Result result = static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( &info ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireProfilingLockKHR" ); + Result result = static_cast( + d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( &info ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pCommandBuffers ) ) ); + return static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pCommandBuffers ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const { - std::vector commandBuffers( allocateInfo.commandBufferCount ); - Result result 
= static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); - return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" ); + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector commandBuffers( allocateInfo.commandBufferCount, vectorAllocator ); - Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); - return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" ); + std::vector commandBuffers( allocateInfo.commandBufferCount, vectorAllocator ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType, Allocator>>::type + Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const { std::vector, Allocator> uniqueCommandBuffers; - std::vector commandBuffers( allocateInfo.commandBufferCount ); - Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast(commandBuffers.data()) ) ); + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) { uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); - PoolFree deleter( *this, allocateInfo.commandPool, d ); - for ( size_t i=0 ; i deleter( *this, allocateInfo.commandPool, d ); + for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ ) { uniqueCommandBuffers.push_back( UniqueHandle( commandBuffers[i], deleter ) ); } } - return createResultValue( result, uniqueCommandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + return createResultValue( + result, uniqueCommandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template + 
[vulkan.hpp hunk continued: clang-format-only reformatting, with no functional changes, of the implementations of the
 following VULKAN_HPP_NAMESPACE::Device member functions: allocateCommandBuffersUnique, allocateDescriptorSets,
 allocateDescriptorSetsUnique, allocateMemory, allocateMemoryUnique, bindAccelerationStructureMemoryKHR,
 bindAccelerationStructureMemoryNV, bindBufferMemory, bindBufferMemory2, bindBufferMemory2KHR, bindImageMemory,
 bindImageMemory2, bindImageMemory2KHR, buildAccelerationStructureKHR, compileDeferredNV,
 copyAccelerationStructureKHR, copyAccelerationStructureToMemoryKHR, copyMemoryToAccelerationStructureKHR,
 createAccelerationStructureKHR(Unique), createAccelerationStructureNV(Unique), createBuffer(Unique),
 createBufferView(Unique), createCommandPool(Unique), createComputePipelines / createComputePipeline(Unique),
 createDeferredOperationKHR(Unique), createDescriptorPool(Unique), createDescriptorSetLayout(Unique), and the
 beginning of createDescriptorUpdateTemplate. Throughout these hunks the single-line definitions are replaced by
 forms wrapped to the 120-column limit with one parameter or argument per line, spaces inside parentheses, pointer
 and reference declarators spaced as "Type * pName" and "Dispatch const & d", a space inserted between
 VULKAN_HPP_NAMESPACE_STRING and the adjacent string literal, loop headers spaced as
 "for ( size_t i = 0; i < count; i++ )", and nested preprocessor directives indented after the hash, e.g.
 "# ifndef VULKAN_HPP_NO_SMART_HANDLE" and "# endif /*VULKAN_HPP_NO_SMART_HANDLE*/" inside enhanced-mode blocks.]
descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; - Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); + Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, + descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE_STRING + "::Device::createDescriptorUpdateTemplateUnique", + deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorUpdateTemplate ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; - Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - return createResultValue( result, 
descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHR" ); + Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + return createResultValue( + result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; - Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); + Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHRUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, + descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE_STRING + "::Device::createDescriptorUpdateTemplateKHRUnique", + deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Event * pEvent, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateEvent( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pEvent ) ) ); + return static_cast( d.vkCreateEvent( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pEvent ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createEvent( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createEvent( + const EventCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Event event; - Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); - return createResultValue( result, event, 
VULKAN_HPP_NAMESPACE_STRING"::Device::createEvent" ); + Result result = static_cast( d.vkCreateEvent( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &event ) ) ); + return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createEventUnique( + const EventCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Event event; - Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); + Result result = static_cast( d.vkCreateEvent( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &event ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEventUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateFence( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); + return static_cast( d.vkCreateFence( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createFence( const FenceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createFence( + const FenceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Fence fence; - Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFence" ); + Result result = static_cast( d.vkCreateFence( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFenceUnique( const 
FenceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFenceUnique( + const FenceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Fence fence; - Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + Result result = static_cast( d.vkCreateFence( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFenceUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFramebuffer ) ) ); + return static_cast( d.vkCreateFramebuffer( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFramebuffer ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createFramebuffer( + const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; - Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); - return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebuffer" ); + Result result = static_cast( d.vkCreateFramebuffer( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &framebuffer ) ) ); + return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFramebufferUnique( + const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { 
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; - Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); + Result result = static_cast( d.vkCreateFramebuffer( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &framebuffer ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebufferUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); + return static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy createInfos, + Optional allocator, + Dispatch const & d ) const { - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateGraphicsPipelines( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } - template - 
VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy createInfos, + Optional allocator, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector pipelines( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector pipelines( createInfos.size(), vectorAllocator ); + Result result = static_cast( d.vkCreateGraphicsPipelines( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE ResultValue Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue + Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const GraphicsPipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { Pipeline pipeline; - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + Result result = static_cast( d.vkCreateGraphicsPipelines( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", + { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValue, Allocator>> + Device::createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy createInfos, + Optional allocator, + Dispatch const & d ) const { std::vector, Allocator> uniquePipelines; - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), 
reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateGraphicsPipelines( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) { uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); } } - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + return createResultValue( + result, + uniquePipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue, Allocator>> + Device::createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy createInfos, + Optional allocator, + Allocator const & vectorAllocator, + Dispatch const & d ) const { std::vector, Allocator> uniquePipelines( vectorAllocator ); - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateGraphicsPipelines( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) { uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); } } - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + return createResultValue( + result, + uniquePipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, 
VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const GraphicsPipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { Pipeline pipeline; - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + Result result = static_cast( d.vkCreateGraphicsPipelines( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT }, deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", + { Result::eSuccess, Result::ePipelineCompileRequiredEXT }, + deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Image * pImage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateImage( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pImage ) ) ); + return static_cast( d.vkCreateImage( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pImage ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createImage( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createImage( + const ImageCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Image image; - Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); - return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImage" ); + Result result = static_cast( d.vkCreateImage( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &image ) ) ); + return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageUnique( 
const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageUnique( + const ImageCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Image image; - Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); + Result result = static_cast( d.vkCreateImage( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &image ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ImageView * pView, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateImageView( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); + return static_cast( d.vkCreateImageView( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pView ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createImageView( + const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::ImageView view; - Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageView" ); + Result result = static_cast( d.vkCreateImageView( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageViewUnique( + const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::ImageView view; - Result result = static_cast( d.vkCreateImageView( m_device, 
reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); + Result result = static_cast( d.vkCreateImageView( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageViewUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV( + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pIndirectCommandsLayout ) ) ); + return static_cast( + d.vkCreateIndirectCommandsLayoutNV( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pIndirectCommandsLayout ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; - Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); - return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNV" ); + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + return createResultValue( + result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d 
) const { VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; - Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, + indirectCommandsLayout, + VULKAN_HPP_NAMESPACE_STRING + "::Device::createIndirectCommandsLayoutNVUnique", + deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineCache ) ) ); + return static_cast( + d.vkCreatePipelineCache( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelineCache ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; - Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); - return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCache" ); + Result result = static_cast( d.vkCreatePipelineCache( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineCache ) ) ); + return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { 
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; - Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); + Result result = static_cast( d.vkCreatePipelineCache( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineCache ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCacheUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineLayout ) ) ); + return static_cast( + d.vkCreatePipelineLayout( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelineLayout ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; - Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); - return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayout" ); + Result result = static_cast( d.vkCreatePipelineLayout( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineLayout ) ) ); + return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; - Result result = static_cast( 
d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); + Result result = static_cast( d.vkCreatePipelineLayout( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineLayout ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayoutUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pQueryPool ) ) ); + return static_cast( d.vkCreateQueryPool( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pQueryPool ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createQueryPool( + const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::QueryPool queryPool; - Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); - return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPool" ); + Result result = static_cast( d.vkCreateQueryPool( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &queryPool ) ) ); + return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createQueryPoolUnique( + const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::QueryPool queryPool; - Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); + Result result = static_cast( d.vkCreateQueryPool( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( 
static_cast( allocator ) ), + reinterpret_cast( &queryPool ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPoolUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); + return static_cast( + d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy createInfos, + Optional allocator, + Dispatch const & d ) const { - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { Result::eSuccess, + Result::eOperationDeferredKHR, + Result::eOperationNotDeferredKHR, + Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesKHR( 
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy createInfos, + Optional allocator, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector pipelines( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); + std::vector pipelines( createInfos.size(), vectorAllocator ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { Result::eSuccess, + Result::eOperationDeferredKHR, + Result::eOperationNotDeferredKHR, + Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE ResultValue Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { Pipeline pipeline; - Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", + { Result::eSuccess, + Result::eOperationDeferredKHR, + Result::eOperationNotDeferredKHR, + Result::ePipelineCompileRequiredEXT } ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValue, Allocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy createInfos, + Optional allocator, + Dispatch const & d ) const { std::vector, Allocator> 
uniquePipelines; - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) { uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); } } - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + return createResultValue( result, + uniquePipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue, Allocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy createInfos, + Optional allocator, + Allocator const & vectorAllocator, + Dispatch const & d ) const { std::vector, Allocator> uniquePipelines( vectorAllocator ); - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + 
reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) { uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); } } - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + return createResultValue( result, + uniquePipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { Pipeline pipeline; - Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineKHRUnique", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT }, deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING + "::Device::createRayTracingPipelineKHRUnique", + { Result::eSuccess, + Result::eOperationDeferredKHR, + Result::eOperationNotDeferredKHR, + Result::ePipelineCompileRequiredEXT }, + deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); + return static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy createInfos, + Optional allocator, + Dispatch const & d ) const { - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy createInfos, + Optional allocator, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector pipelines( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector pipelines( createInfos.size(), vectorAllocator ); + Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + 
reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE ResultValue Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { Pipeline pipeline; - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", + { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValue, Allocator>> + Device::createRayTracingPipelinesNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy createInfos, + Optional allocator, + Dispatch const & d ) const { std::vector, Allocator> uniquePipelines; - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) { uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); } } - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + return createResultValue( + result, + uniquePipelines, + 
VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue, Allocator>> + Device::createRayTracingPipelinesNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy createInfos, + Optional allocator, + Allocator const & vectorAllocator, + Dispatch const & d ) const { std::vector, Allocator> uniquePipelines( vectorAllocator ); - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) { uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); } } - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + return createResultValue( + result, + uniquePipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { Pipeline pipeline; - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNVUnique", { Result::eSuccess, 
Result::ePipelineCompileRequiredEXT }, deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING + "::Device::createRayTracingPipelineNVUnique", + { Result::eSuccess, Result::ePipelineCompileRequiredEXT }, + deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); + return static_cast( d.vkCreateRenderPass( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass( + const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass" ); + Result result = static_cast( d.vkCreateRenderPass( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPassUnique( + const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); + Result result = static_cast( d.vkCreateRenderPass( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPassUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, renderPass, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createRenderPassUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateRenderPass2( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); + return static_cast( d.vkCreateRenderPass2( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass2( + const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass2( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2" ); + Result result = static_cast( d.vkCreateRenderPass2( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPass2Unique( + const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass2( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); + Result result = static_cast( d.vkCreateRenderPass2( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2Unique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE 
Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); + return static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass2KHR( + const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHR" ); + Result result = static_cast( d.vkCreateRenderPass2KHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); + Result result = static_cast( d.vkCreateRenderPass2KHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHRUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, 
Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Sampler * pSampler, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateSampler( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSampler ) ) ); + return static_cast( d.vkCreateSampler( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSampler ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createSampler( + const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Sampler sampler; - Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); - return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSampler" ); + Result result = static_cast( d.vkCreateSampler( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &sampler ) ) ); + return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerUnique( + const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Sampler sampler; - Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); + Result result = static_cast( d.vkCreateSampler( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &sampler ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pYcbcrConversion ) ) ); + return static_cast( + d.vkCreateSamplerYcbcrConversion( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pYcbcrConversion ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; - Result result = static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); - return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversion" ); + Result result = static_cast( d.vkCreateSamplerYcbcrConversion( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; - Result result = static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); + Result result = static_cast( d.vkCreateSamplerYcbcrConversion( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + 
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pYcbcrConversion ) ) ); + return static_cast( + d.vkCreateSamplerYcbcrConversionKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pYcbcrConversion ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; - Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); - return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHR" ); + Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; - Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); + Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHRUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE 
Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSemaphore ) ) ); + return static_cast( d.vkCreateSemaphore( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSemaphore ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createSemaphore( + const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Semaphore semaphore; - Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); - return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphore" ); + Result result = static_cast( d.vkCreateSemaphore( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &semaphore ) ) ); + return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSemaphoreUnique( + const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Semaphore semaphore; - Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); + Result result = static_cast( d.vkCreateSemaphore( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &semaphore ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphoreUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( 
d.vkCreateShaderModule( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pShaderModule ) ) ); + return static_cast( + d.vkCreateShaderModule( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pShaderModule ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createShaderModule( + const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; - Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); - return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModule" ); + Result result = static_cast( d.vkCreateShaderModule( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shaderModule ) ) ); + return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; - Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); + Result result = static_cast( d.vkCreateShaderModule( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shaderModule ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModuleUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::createSharedSwapchainsKHR( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( 
pAllocator ), reinterpret_cast( pSwapchains ) ) ); + return static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + swapchainCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSwapchains ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSharedSwapchainsKHR( ArrayProxy createInfos, + Optional allocator, + Dispatch const & d ) const { - std::vector swapchains( createInfos.size() ); - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); - return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" ); + std::vector swapchains( createInfos.size() ); + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSharedSwapchainsKHR( ArrayProxy createInfos, + Optional allocator, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector swapchains( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); - return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" ); + std::vector swapchains( createInfos.size(), vectorAllocator ); + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createSharedSwapchainKHR( + const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const & d ) const { SwapchainKHR swapchain; - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHR" ); + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + 
return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType, Allocator>>::type + Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, + Optional allocator, + Dispatch const & d ) const { std::vector, Allocator> uniqueSwapchainKHRs; - std::vector swapchainKHRs( createInfos.size() ); - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(swapchainKHRs.data()) ) ); + std::vector swapchainKHRs( createInfos.size() ); + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchainKHRs.data() ) ) ); if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) { uniqueSwapchainKHRs.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) { uniqueSwapchainKHRs.push_back( UniqueHandle( swapchainKHRs[i], deleter ) ); } } - return createResultValue( result, uniqueSwapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + return createResultValue( + result, uniqueSwapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType, Allocator>>::type + Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, + Optional allocator, + Allocator const & vectorAllocator, + Dispatch const & d ) const { std::vector, Allocator> uniqueSwapchainKHRs( vectorAllocator ); - std::vector swapchainKHRs( createInfos.size() ); - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(swapchainKHRs.data()) ) ); + std::vector swapchainKHRs( createInfos.size() ); + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchainKHRs.data() ) ) ); if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) { uniqueSwapchainKHRs.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) { uniqueSwapchainKHRs.push_back( UniqueHandle( swapchainKHRs[i], deleter ) ); } } - return createResultValue( result, uniqueSwapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + return createResultValue( + result, uniqueSwapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type 
Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { SwapchainKHR swapchain; - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHRUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSwapchain ) ) ); + return static_cast( + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSwapchain ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createSwapchainKHR( + const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; - Result result = static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHR" ); + Result result = static_cast( d.vkCreateSwapchainKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + 
Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; - Result result = static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); + Result result = static_cast( d.vkCreateSwapchainKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHRUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pValidationCache ) ) ); + return static_cast( + d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pValidationCache ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; - Result result = static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &validationCache ) ) ); - return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXT" ); + Result result = static_cast( d.vkCreateValidationCacheEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + return createResultValue( + result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createValidationCacheEXTUnique( const 
ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; - Result result = static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &validationCache ) ) ); + Result result = static_cast( d.vkCreateValidationCacheEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXTUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); + return static_cast( d.vkDebugMarkerSetObjectNameEXT( + m_device, reinterpret_cast( pNameInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectNameEXT" ); + Result result = static_cast( d.vkDebugMarkerSetObjectNameEXT( + m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + return static_cast( + d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const { - Result result = static_cast( 
d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectTagEXT" ); + Result result = static_cast( + d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + return static_cast( + d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); } -#else - template - VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const +# else + template + VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d ) const { - Result result = static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::deferredOperationJoinKHR", { Result::eSuccess, Result::eThreadDoneKHR, Result::eThreadIdleKHR } ); + Result result = + static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", + { Result::eSuccess, Result::eThreadDoneKHR, Result::eThreadIdleKHR } ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + d.vkDestroyAccelerationStructureKHR( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + 
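The hunks above and below all follow the same pattern: with ColumnLimit 120, BinPackParameters and BinPackArguments set to false, AlignAfterOpenBracket: Align and PointerAlignment: Middle, a wrapper that no longer fits on a single line gets one parameter per line and one call argument per line, and concatenations such as VULKAN_HPP_NAMESPACE_STRING "::Device::..." gain a separating space before the string literal. A minimal self-contained sketch of the resulting shape (VkThing, Dispatcher and destroyThing are placeholder names, not vulkan.hpp declarations):

// formatting_sketch.cpp - placeholder types standing in for a Vulkan handle and the
// dispatch table; only the shape of the formatted code matters here, not the names.
#include <cstdio>

struct VkAllocationCallbacks
{};

using VkThing = void *;

struct Dispatcher
{
  void vkDestroyThing( void * device, VkThing thing, const VkAllocationCallbacks * pAllocator ) const
  {
    std::printf( "vkDestroyThing called\n" );
    (void)device;
    (void)thing;
    (void)pAllocator;
  }
};

class Device
{
public:
  // One parameter per line, aligned to the opening parenthesis; the "Dispatch const & d"
  // spacing comes from PointerAlignment: Middle; call arguments also get one line each.
  template <typename Dispatch>
  void destroyThing( VkThing                       thing,
                     const VkAllocationCallbacks * pAllocator,
                     Dispatch const &              d ) const noexcept
  {
    d.vkDestroyThing( m_device,
                      thing,
                      pAllocator );
  }

private:
  void * m_device = nullptr;
};

int main()
{
  Device().destroyThing( nullptr, nullptr, Dispatcher{} );
}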
d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + d.vkDestroyAccelerationStructureNV( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + d.vkDestroyAccelerationStructureKHR( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBuffer( + m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyBuffer( + m_device, + static_cast( buffer ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBuffer( + m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyBuffer( + m_device, + static_cast( buffer ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyBufferView( + m_device, + static_cast( bufferView ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyBufferView( + m_device, + static_cast( bufferView ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyCommandPool( + m_device, + static_cast( commandPool ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyCommandPool( + m_device, 
+ static_cast( commandPool ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDeferredOperationKHR( m_device, + static_cast( operation ), + reinterpret_cast( pAllocator ) ); + } +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( + m_device, + static_cast( operation ), + reinterpret_cast( static_cast( allocator ) ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, + static_cast( operation ), + reinterpret_cast( pAllocator ) ); + } +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( + m_device, + static_cast( operation ), + reinterpret_cast( static_cast( allocator ) ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorPool( + m_device, + static_cast( descriptorPool ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - 
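The repeated change from #ifndef to # ifndef in these hunks is the effect of IndentPPDirectives: AfterHash: a guard nested inside another conditional keeps the '#' in column 0 but indents the directive keyword, while top-level guards are left alone. Schematically (the stand-in functions are placeholders, and the indent width shown simply follows the configured IndentWidth):

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
inline int enhancedModeVariant() { return 0; }  // stand-in for the enhanced-mode wrapper
# ifndef VULKAN_HPP_NO_SMART_HANDLE
inline int uniqueHandleVariant() { return 0; }  // stand-in for the ...Unique wrapper
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/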
d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template - VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorPool( + m_device, + static_cast( descriptorPool ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorSetLayout( + m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - 
d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorSetLayout( + m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorUpdateTemplate( m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorUpdateTemplate( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorUpdateTemplateKHR( m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + 
Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorUpdateTemplateKHR( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorUpdateTemplate( m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorUpdateTemplate( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDevice( m_device, reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDevice( + m_device, + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDevice( m_device, reinterpret_cast( pAllocator ) ); + d.vkDestroyEvent( + m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDevice( m_device, reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyEvent( + m_device, + static_cast( event ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + d.vkDestroyEvent( + m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyEvent( + m_device, + static_cast( event ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFence( + m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyFence( + m_device, + static_cast( fence ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFence( + m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyFence( + m_device, + static_cast( fence ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyFramebuffer( + m_device, + static_cast( framebuffer ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyFramebuffer( + m_device, + static_cast( framebuffer ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, 
Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImage( + m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyImage( + m_device, + static_cast( image ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImage( + m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyImage( + m_device, + static_cast( image ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImageView( + m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyImageView( + m_device, + static_cast( imageView ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void 
Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImageView( + m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyImageView( + m_device, + static_cast( imageView ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyIndirectCommandsLayoutNV( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV 
indirectCommandsLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyIndirectCommandsLayoutNV( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipeline( + m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPipeline( + m_device, + static_cast( pipeline ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipeline( + m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPipeline( + m_device, + static_cast( pipeline ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineCache( m_device, + static_cast( pipelineCache ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPipelineCache( + m_device, + static_cast( pipelineCache ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineCache( m_device, + static_cast( pipelineCache ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPipelineCache( + m_device, + static_cast( pipelineCache ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPipelineLayout( + m_device, + static_cast( 
pipelineLayout ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPipelineLayout( + m_device, + static_cast( pipelineLayout ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyQueryPool( + m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyQueryPool( + m_device, + static_cast( queryPool ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyQueryPool( + m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator, 
Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyQueryPool( + m_device, + static_cast( queryPool ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyRenderPass( + m_device, + static_cast( renderPass ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyRenderPass( + m_device, + static_cast( renderPass ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); 
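The enhanced-mode destroy overloads in these hunks all route the optional allocator through the same two-step cast before calling the C entry point: the Optional wrapper converts to a plain (possibly null) AllocationCallbacks pointer, which is then reinterpreted as the corresponding Vk struct pointer. Written out with the template arguments these wrappers normally carry (reconstructed here as a sketch, so treat the exact spelling and alignment as approximate):

template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass    renderPass,
                                        Optional<const AllocationCallbacks> allocator,
                                        Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
{
  d.vkDestroyRenderPass(
    m_device,
    static_cast<VkRenderPass>( renderPass ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const AllocationCallbacks *>( allocator ) ) );
}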
+ d.vkDestroySampler( + m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySampler( + m_device, + static_cast( sampler ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + d.vkDestroySampler( + m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySampler( + m_device, + static_cast( sampler ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + d.vkDestroySamplerYcbcrConversion( m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySamplerYcbcrConversion( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + d.vkDestroySamplerYcbcrConversionKHR( m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySamplerYcbcrConversionKHR( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + d.vkDestroySamplerYcbcrConversion( m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySamplerYcbcrConversion( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + d.vkDestroySemaphore( + m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional 
allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySemaphore( + m_device, + static_cast( semaphore ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + d.vkDestroySemaphore( + m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySemaphore( + m_device, + static_cast( semaphore ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyShaderModule( + m_device, + static_cast( shaderModule ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyShaderModule( m_device, 
static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyShaderModule( + m_device, + static_cast( shaderModule ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySwapchainKHR( + m_device, + static_cast( swapchain ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySwapchainKHR( + m_device, + static_cast( swapchain ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyValidationCacheEXT( + m_device, + static_cast( validationCache ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyValidationCacheEXT( m_device, static_cast( validationCache ), reinterpret_cast( pAllocator ) ); + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyValidationCacheEXT( m_device, static_cast( validationCache ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyValidationCacheEXT( m_device, static_cast( validationCache ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyValidationCacheEXT( m_device, static_cast( validationCache ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyValidationCacheEXT( + m_device, + static_cast( validationCache ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + 
VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
  }
#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitIdle" );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result
+    Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+                                    const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
+                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
-    return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( pDisplayPowerInfo ) ) );
+    return static_cast<Result>(
+      d.vkDisplayPowerControlEXT( m_device,
+                                  static_cast<VkDisplayKHR>( display ),
+                                  reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT(
+    VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const
  {
-    Result result = static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( &displayPowerInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::displayPowerControlEXT" );
+    Result result = static_cast<Result>(
+      d.vkDisplayPowerControlEXT( m_device,
+                                  static_cast<VkDisplayKHR>( display ),
+                                  reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result
+    Device::flushMappedMemoryRanges( uint32_t memoryRangeCount,
+                                     const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
+                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
-    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
+    return static_cast<Result>( d.vkFlushMappedMemoryRanges(
+      m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges,
+                                     Dispatch const & d ) const
  {
-    Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::flushMappedMemoryRanges" );
+    Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges(
+      m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+                                                     uint32_t commandBufferCount,
+                                                     const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+    d.vkFreeCommandBuffers( m_device,
+                            static_cast<VkCommandPool>( commandPool ),
+                            commandBufferCount,
+                            reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+                                ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers,
+                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+    d.vkFreeCommandBuffers( m_device,
+                            static_cast<VkCommandPool>( commandPool ),
+                            commandBuffers.size(),
+                            reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+                                       uint32_t commandBufferCount,
+                                       const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+    d.vkFreeCommandBuffers( m_device,
+                            static_cast<VkCommandPool>( commandPool ),
+                            commandBufferCount,
+                            reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+                                       ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers,
+                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+    d.vkFreeCommandBuffers( m_device,
+                            static_cast<VkCommandPool>( commandPool ),
+                            commandBuffers.size(),
+                            reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+                                                       uint32_t descriptorSetCount,
+                                                       const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
-    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
+    return static_cast<Result>(
+      d.vkFreeDescriptorSets( m_device,
+                              static_cast<VkDescriptorPool>( descriptorPool ),
+                              descriptorSetCount,
+                              reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+                                ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets,
+                                Dispatch const & d ) const
  {
-    Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::freeDescriptorSets" );
+    Result result = static_cast<Result>(
+      d.vkFreeDescriptorSets( m_device,
+                              static_cast<VkDescriptorPool>( descriptorPool ),
+                              descriptorSets.size(),
+                              reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::freeDescriptorSets" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+                                         uint32_t descriptorSetCount,
+                                         const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
-    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
+    return static_cast<Result>(
+      d.vkFreeDescriptorSets( m_device,
+                              static_cast<VkDescriptorPool>( descriptorPool ),
+                              descriptorSetCount,
+                              reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+                  ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets,
+                  Dispatch const & d ) const
  {
-    Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::free" );
+    Result result = static_cast<Result>(
+      d.vkFreeDescriptorSets( m_device,
+                              static_cast<VkDescriptorPool>( descriptorPool ),
+                              descriptorSets.size(),
+                              reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::free" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
template + VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + d.vkFreeMemory( + m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkFreeMemory( + m_device, + static_cast( memory ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + d.vkFreeMemory( + m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkFreeMemory( + m_device, + static_cast( memory ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast( pInfo ) ) ); + return static_cast( d.vkGetAccelerationStructureDeviceAddressKHR( + m_device, reinterpret_cast( pInfo ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( + const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); + return d.vkGetAccelerationStructureDeviceAddressKHR( + m_device, reinterpret_cast( 
&info ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), dataSize, pData ) ); + return static_cast( d.vkGetAccelerationStructureHandleNV( + m_device, static_cast( accelerationStructure ), dataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy data, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getAccelerationStructureHandleNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy data, Dispatch const & d ) const { - Result result = static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureHandleNV" ); + Result result = static_cast( + d.vkGetAccelerationStructureHandleNV( m_device, + static_cast( accelerationStructure ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetAccelerationStructureMemoryRequirementsKHR( + m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - 
d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + d.vkGetAccelerationStructureMemoryRequirementsKHR( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } - template - VULKAN_HPP_INLINE StructureChain Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); - d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetAccelerationStructureMemoryRequirementsKHR( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetAccelerationStructureMemoryRequirementsNV( + m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR + Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; - d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + d.vkGetAccelerationStructureMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } - template - VULKAN_HPP_INLINE StructureChain Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR& memoryRequirements = structureChain.template get(); - d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = + structureChain.template get(); + d.vkGetAccelerationStructureMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR - template - VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( + const struct AHardwareBuffer * buffer, + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( + m_device, buffer, reinterpret_cast( pProperties ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; - Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast( &properties ) ) ); - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" ); + Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( + m_device, buffer, reinterpret_cast( &properties ) ) ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID& properties = structureChain.template get(); - Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast( &properties ) ) ); - return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = + structureChain.template get(); + Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( + m_device, buffer, reinterpret_cast( &properties ) ) 
); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template - VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast( pInfo ) ) ); + return static_cast( + d.vkGetBufferDeviceAddress( m_device, reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast( &info ) ); + return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast( &info ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( pInfo ) ) ); + return static_cast( + d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( &info ) ); + return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( &info ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast( pInfo ) ) ); + return static_cast( + d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); + return 
d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetBufferMemoryRequirements( m_device, static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetBufferMemoryRequirements( + m_device, static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements + Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; - d.vkGetBufferMemoryRequirements( m_device, static_cast( buffer ), reinterpret_cast( &memoryRequirements ) ); + d.vkGetBufferMemoryRequirements( + m_device, static_cast( buffer ), reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetBufferMemoryRequirements2( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + d.vkGetBufferMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } - template - VULKAN_HPP_INLINE StructureChain Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2& 
memoryRequirements = structureChain.template get(); - d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetBufferMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetBufferMemoryRequirements2KHR( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + d.vkGetBufferMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } - template - VULKAN_HPP_INLINE StructureChain Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); - d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetBufferMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast( pInfo ) ); + return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast( pInfo ) ); } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast( &info ) ); + return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast( &info ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast( pInfo ) ); + return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, + reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast( &info ) ); + return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, + reinterpret_cast( &info ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getCalibratedTimestampsEXT( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); + return static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, + timestampCount, + reinterpret_cast( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getCalibratedTimestampsEXT( ArrayProxy timestampInfos, ArrayProxy timestamps, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getCalibratedTimestampsEXT( + ArrayProxy timestampInfos, + ArrayProxy timestamps, + Dispatch const & d ) const { -#ifdef VULKAN_HPP_NO_EXCEPTIONS +# ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() ); -#else +# else if ( timestampInfos.size() != timestamps.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::VkDevice::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" ); } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ uint64_t 
maxDeviation; - Result result = static_cast( d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size() , reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); - return createResultValue( result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING"::Device::getCalibratedTimestampsEXT" ); + Result result = static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + return createResultValue( + result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast( operation ) ); } -#else - template - VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# else + template + VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast( operation ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + return static_cast( + d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); } -#else - template - VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const +# else + template + VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d ) const { - Result result = static_cast( d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getDeferredOperationResultKHR", { Result::eSuccess, Result::eNotReady } ); + Result result = static_cast( + d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getDeferredOperationResultKHR", + { Result::eSuccess, Result::eNotReady } ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pSupport ) ); + d.vkGetDescriptorSetLayoutSupport( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pSupport ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; - d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + d.vkGetDescriptorSetLayoutSupport( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); return support; } - template - VULKAN_HPP_INLINE StructureChain Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport& support = structureChain.template get(); - d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = + structureChain.template get(); + d.vkGetDescriptorSetLayoutSupport( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pSupport ) ); + d.vkGetDescriptorSetLayoutSupportKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pSupport ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; - d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + d.vkGetDescriptorSetLayoutSupportKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); return support; } - template - VULKAN_HPP_INLINE StructureChain Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport& support = structureChain.template get(); - d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = + structureChain.template get(); + d.vkGetDescriptorSetLayoutSupportKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE Result Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getAccelerationStructureCompatibilityKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR * version, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast( version ) ) ); + return static_cast( d.vkGetDeviceAccelerationStructureCompatibilityKHR( + m_device, reinterpret_cast( version ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, + Dispatch const & d ) const { - Result result = static_cast( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast( &version ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureCompatibilityKHR" ); + Result result = static_cast( d.vkGetDeviceAccelerationStructureCompatibilityKHR( + m_device, reinterpret_cast( &version ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureCompatibilityKHR" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + 
Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( pPeerMemoryFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; - d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); return peerMemoryFeatures; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( pPeerMemoryFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; - d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); return peerMemoryFeatures; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result 
Device::getGroupPresentCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( pDeviceGroupPresentCapabilities ) ) ); + return static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( + m_device, reinterpret_cast( pDeviceGroupPresentCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; - Result result = static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); - return createResultValue( result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupPresentCapabilitiesKHR" ); + Result result = static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( + m_device, reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); + return createResultValue( + result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pModes ) ) ); + return static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( + m_device, + reinterpret_cast( pSurfaceInfo ), + reinterpret_cast( pModes ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; - Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &modes ) ) ); - return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModes2EXT" ); + Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( + m_device, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &modes ) ) ); + return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - 
VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast( surface ), reinterpret_cast( pModes ) ) ); + return static_cast( + d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, + static_cast( surface ), + reinterpret_cast( pModes ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; - Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast( surface ), reinterpret_cast( &modes ) ) ); - return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModesKHR" ); + Result result = static_cast( + d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, + static_cast( surface ), + reinterpret_cast( &modes ) ) ); + return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); + d.vkGetDeviceMemoryCommitment( + m_device, static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; - d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), reinterpret_cast( &committedMemoryInBytes ) ); + d.vkGetDeviceMemoryCommitment( + m_device, static_cast( memory ), reinterpret_cast( &committedMemoryInBytes ) ); return committedMemoryInBytes; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT
   {
-    return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( pInfo ) );
+    return d.vkGetDeviceMemoryOpaqueCaptureAddress(
+      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info,
+                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( &info ) );
+    return d.vkGetDeviceMemoryOpaqueCaptureAddress(
+      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t
+    Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
+                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( pInfo ) );
+    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
+      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR(
+    const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( &info ) );
+    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
+      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     return d.vkGetDeviceProcAddr( m_device, pName );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     return d.vkGetDeviceProcAddr( m_device, name.c_str() );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex,
+                                           uint32_t queueIndex,
+                                           VULKAN_HPP_NAMESPACE::Queue * pQueue,
+                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( pQueue ) );
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
+    Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_NAMESPACE::Queue queue;
-    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( &queue ) );
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
     return queue;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
+                                            VULKAN_HPP_NAMESPACE::Queue * pQueue,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( pQueueInfo ), reinterpret_cast<VkQueue*>( pQueue ) );
+    d.vkGetDeviceQueue2(
+      m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo,
+                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_NAMESPACE::Queue queue;
-    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( &queueInfo ), reinterpret_cast<VkQueue*>( &queue ) );
+    d.vkGetDeviceQueue2(
+      m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
     return queue;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
+                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
   {
     Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
+    return createResultValue(
+      result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
+                                                  int * pFd,
+                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
+    return static_cast<Result>(
+      d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo,
+                                                                               Dispatch const & d ) const
   {
-    int fd;
-    Result result = static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( &getFdInfo ), &fd ) );
-    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceFdKHR" );
+    int    fd;
+    Result result = static_cast<Result>(
+      d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
+                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
   {
     Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
+    return createResultValue(
+      result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result
+    Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+                                    HANDLE * pHandle,
+                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
+    return static_cast<Result>( d.vkGetFenceWin32HandleKHR(
+      m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
   }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
+    Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
   {
     HANDLE handle;
-    Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
-    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceWin32HandleKHR" );
+    Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR(
+      m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void
Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetGeneratedCommandsMemoryRequirementsNV( + m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + d.vkGetGeneratedCommandsMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } - template - VULKAN_HPP_INLINE StructureChain Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); - d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetGeneratedCommandsMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast( image ), reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, + static_cast( image ), + reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getImageDrmFormatModifierPropertiesEXT( 
VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; - Result result = static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast( image ), reinterpret_cast( &properties ) ) ); - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getImageDrmFormatModifierPropertiesEXT" ); + Result result = static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, + static_cast( image ), + reinterpret_cast( &properties ) ) ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetImageMemoryRequirements( m_device, static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetImageMemoryRequirements( + m_device, static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements + Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; - d.vkGetImageMemoryRequirements( m_device, static_cast( image ), reinterpret_cast( &memoryRequirements ) ); + d.vkGetImageMemoryRequirements( + m_device, static_cast( image ), reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetImageMemoryRequirements2( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { 
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + d.vkGetImageMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } - template - VULKAN_HPP_INLINE StructureChain Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); - d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetImageMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetImageMemoryRequirements2KHR( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + d.vkGetImageMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } - template - VULKAN_HPP_INLINE StructureChain Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); - d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetImageMemoryRequirements2KHR( 
m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); + d.vkGetImageSparseMemoryRequirements( + m_device, + static_cast( image ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const { - std::vector sparseMemoryRequirements; - uint32_t sparseMemoryRequirementCount; - d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( + m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + d.vkGetImageSparseMemoryRequirements( + m_device, + static_cast( image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); return sparseMemoryRequirements; } - template - VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector sparseMemoryRequirements( vectorAllocator ); - uint32_t sparseMemoryRequirementCount; - d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + std::vector sparseMemoryRequirements( vectorAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( + m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + d.vkGetImageSparseMemoryRequirements( + m_device, + static_cast( image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); return sparseMemoryRequirements; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const 
VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); + d.vkGetImageSparseMemoryRequirements2( + m_device, + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d ) const { - std::vector sparseMemoryRequirements; - uint32_t sparseMemoryRequirementCount; - d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + d.vkGetImageSparseMemoryRequirements2( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); return sparseMemoryRequirements; } - template - VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( + const ImageSparseMemoryRequirementsInfo2 & info, Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector sparseMemoryRequirements( vectorAllocator ); - uint32_t sparseMemoryRequirementCount; - d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + std::vector sparseMemoryRequirements( vectorAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + d.vkGetImageSparseMemoryRequirements2( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); return sparseMemoryRequirements; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const 
VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d ) const { - std::vector sparseMemoryRequirements; - uint32_t sparseMemoryRequirementCount; - d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); return sparseMemoryRequirements; } - template - VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2KHR( + const ImageSparseMemoryRequirementsInfo2 & info, Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector sparseMemoryRequirements( vectorAllocator ); - uint32_t sparseMemoryRequirementCount; - d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + std::vector sparseMemoryRequirements( vectorAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); return sparseMemoryRequirements; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( 
VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetImageSubresourceLayout( m_device, static_cast( image ), reinterpret_cast( pSubresource ), reinterpret_cast( pLayout ) ); + d.vkGetImageSubresourceLayout( m_device, + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout + Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const ImageSubresource & subresource, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::SubresourceLayout layout; - d.vkGetImageSubresourceLayout( m_device, static_cast( image ), reinterpret_cast( &subresource ), reinterpret_cast( &layout ) ); + d.vkGetImageSubresourceLayout( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return layout; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetImageViewAddressNVX( m_device, static_cast( imageView ), reinterpret_cast( pProperties ) ) ); + return static_cast( + d.vkGetImageViewAddressNVX( m_device, + static_cast( imageView ), + reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; - Result result = static_cast( d.vkGetImageViewAddressNVX( m_device, static_cast( imageView ), reinterpret_cast( &properties ) ) ); - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getImageViewAddressNVX" ); + Result result = static_cast( + d.vkGetImageViewAddressNVX( m_device, + static_cast( imageView ), + reinterpret_cast( &properties ) ) ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const 
VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( pInfo ) ); + return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( &info ) ); + return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( &info ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR - template - VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( + const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( pInfo ), pBuffer ) ); + return static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( + m_device, reinterpret_cast( pInfo ), pBuffer ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, + Dispatch const & d ) const { - struct AHardwareBuffer* buffer; - Result result = static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( &info ), &buffer ) ); - return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryAndroidHardwareBufferANDROID" ); + struct AHardwareBuffer * buffer; + Result result = static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( + m_device, reinterpret_cast( &info ), &buffer ) ); + return createResultValue( + result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template - VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + return static_cast( + d.vkGetMemoryFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const + template + 
VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, + Dispatch const & d ) const { - int fd; - Result result = static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); - return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdKHR" ); + int fd; + Result result = static_cast( + d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( pMemoryFdProperties ) ) ); + return static_cast( + d.vkGetMemoryFdPropertiesKHR( m_device, + static_cast( handleType ), + fd, + reinterpret_cast( pMemoryFdProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; - Result result = static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( &memoryFdProperties ) ) ); - return createResultValue( result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdPropertiesKHR" ); + Result result = static_cast( + d.vkGetMemoryFdPropertiesKHR( m_device, + static_cast( handleType ), + fd, + reinterpret_cast( &memoryFdProperties ) ) ); + return createResultValue( + result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast( handleType ), pHostPointer, reinterpret_cast( pMemoryHostPointerProperties ) ) ); + return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( + m_device, + static_cast( handleType ), + pHostPointer, + reinterpret_cast( pMemoryHostPointerProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - 
VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; - Result result = static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast( handleType ), pHostPointer, reinterpret_cast( &memoryHostPointerProperties ) ) ); - return createResultValue( result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryHostPointerPropertiesEXT" ); + Result result = static_cast( d.vkGetMemoryHostPointerPropertiesEXT( + m_device, + static_cast( handleType ), + pHostPointer, + reinterpret_cast( &memoryHostPointerProperties ) ) ); + return createResultValue( + result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + return static_cast( d.vkGetMemoryWin32HandleKHR( + m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const { HANDLE handle; - Result result = static_cast( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); - return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleKHR" ); + Result result = static_cast( d.vkGetMemoryWin32HandleKHR( + m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return 
static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), pHandle ) ); + return static_cast( + d.vkGetMemoryWin32HandleNV( m_device, + static_cast( memory ), + static_cast( handleType ), + pHandle ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + Dispatch const & d ) const { HANDLE handle; - Result result = static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), &handle ) ); - return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleNV" ); + Result result = + static_cast( d.vkGetMemoryWin32HandleNV( m_device, + static_cast( memory ), + static_cast( handleType ), + &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast( handleType ), handle, reinterpret_cast( pMemoryWin32HandleProperties ) ) ); + return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( + m_device, + static_cast( handleType ), + handle, + reinterpret_cast( pMemoryWin32HandleProperties ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; - Result result = static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast( handleType ), handle, reinterpret_cast( &memoryWin32HandleProperties ) ) ); - return createResultValue( result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandlePropertiesKHR" ); + Result result = static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( + m_device, + static_cast( handleType ), + handle, + reinterpret_cast( &memoryWin32HandleProperties ) ) ); + return createResultValue( + 
result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), pPresentationTimingCount, reinterpret_cast( pPresentationTimings ) ) ); + return static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, + static_cast( swapchain ), + pPresentationTimingCount, + reinterpret_cast( pPresentationTimings ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const { - std::vector presentationTimings; - uint32_t presentationTimingCount; - Result result; + std::vector presentationTimings; + uint32_t presentationTimingCount; + Result result; do { - result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); if ( ( result == Result::eSuccess ) && presentationTimingCount ) { presentationTimings.resize( presentationTimingCount ); - result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, reinterpret_cast( presentationTimings.data() ) ) ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, + static_cast( swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -71799,21 +82978,30 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); presentationTimings.resize( presentationTimingCount ); } - return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" ); + return createResultValue( + result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector presentationTimings( vectorAllocator ); 
- uint32_t presentationTimingCount; - Result result; + std::vector presentationTimings( vectorAllocator ); + uint32_t presentationTimingCount; + Result result; do { - result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); if ( ( result == Result::eSuccess ) && presentationTimingCount ) { presentationTimings.resize( presentationTimingCount ); - result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, reinterpret_cast( presentationTimings.data() ) ) ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, + static_cast( swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -71821,44 +83009,65 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); presentationTimings.resize( presentationTimingCount ); } - return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" ); + return createResultValue( + result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPerformanceParameterINTEL( m_device, static_cast( parameter ), reinterpret_cast( pValue ) ) ); + return static_cast( + d.vkGetPerformanceParameterINTEL( m_device, + static_cast( parameter ), + reinterpret_cast( pValue ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; - Result result = static_cast( d.vkGetPerformanceParameterINTEL( m_device, static_cast( parameter ), reinterpret_cast( &value ) ) ); - return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING"::Device::getPerformanceParameterINTEL" ); + Result result = + static_cast( d.vkGetPerformanceParameterINTEL( m_device, + static_cast( parameter ), + reinterpret_cast( &value ) ) ); + return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getPipelineCacheData( 
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), pDataSize, pData ) ); + return static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), pDataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const { - std::vector data; - size_t dataSize; - Result result; + std::vector data; + size_t dataSize; + Result result; do { - result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + result = static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); if ( ( result == Result::eSuccess ) && dataSize ) { data.resize( dataSize ); - result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + result = static_cast( d.vkGetPipelineCacheData( m_device, + static_cast( pipelineCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -71866,21 +83075,26 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( dataSize <= data.size() ); data.resize( dataSize ); } - return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector data( vectorAllocator ); - size_t dataSize; - Result result; + std::vector data( vectorAllocator ); + size_t dataSize; + Result result; do { - result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + result = static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); if ( ( result == Result::eSuccess ) && dataSize ) { data.resize( dataSize ); - result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + result = static_cast( d.vkGetPipelineCacheData( m_device, + static_cast( pipelineCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -71888,29 +83102,47 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( dataSize <= data.size() ); data.resize( dataSize ); } - return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result 
Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast( pInternalRepresentations ) ) ); + return static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( pExecutableInfo ), + pInternalRepresentationCount, + reinterpret_cast( pInternalRepresentations ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d ) const { - std::vector internalRepresentations; - uint32_t internalRepresentationCount; - Result result; + std::vector internalRepresentations; + uint32_t internalRepresentationCount; + Result result; do { - result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ) ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + nullptr ) ); if ( ( result == Result::eSuccess ) && internalRepresentationCount ) { internalRepresentations.resize( internalRepresentationCount ); - result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, reinterpret_cast( internalRepresentations.data() ) ) ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -71918,21 +83150,34 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); internalRepresentations.resize( internalRepresentationCount ); } - return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" ); + return createResultValue( result, + internalRepresentations, + VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + 
Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector internalRepresentations( vectorAllocator ); - uint32_t internalRepresentationCount; - Result result; + std::vector internalRepresentations( vectorAllocator ); + uint32_t internalRepresentationCount; + Result result; do { - result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ) ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + nullptr ) ); if ( ( result == Result::eSuccess ) && internalRepresentationCount ) { internalRepresentations.resize( internalRepresentationCount ); - result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, reinterpret_cast( internalRepresentations.data() ) ) ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -71940,29 +83185,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); internalRepresentations.resize( internalRepresentationCount ); } - return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" ); + return createResultValue( result, + internalRepresentations, + VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( pPipelineInfo ), pExecutableCount, reinterpret_cast( pProperties ) ) ); + return static_cast( + d.vkGetPipelineExecutablePropertiesKHR( m_device, + reinterpret_cast( pPipelineInfo ), + pExecutableCount, + reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const { - std::vector properties; - uint32_t executableCount; - Result result; + std::vector properties; + uint32_t executableCount; + Result result; do { - result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + result = 
static_cast( d.vkGetPipelineExecutablePropertiesKHR( + m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); if ( ( result == Result::eSuccess ) && executableCount ) { properties.resize( executableCount ); - result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( + m_device, + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -71970,21 +83231,30 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( executableCount <= properties.size() ); properties.resize( executableCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t executableCount; - Result result; + std::vector properties( vectorAllocator ); + uint32_t executableCount; + Result result; do { - result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( + m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); if ( ( result == Result::eSuccess ) && executableCount ) { properties.resize( executableCount ); - result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( + m_device, + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -71992,29 +83262,48 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( executableCount <= properties.size() ); properties.resize( executableCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( 
d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( pExecutableInfo ), pStatisticCount, reinterpret_cast( pStatistics ) ) ); + return static_cast( + d.vkGetPipelineExecutableStatisticsKHR( m_device, + reinterpret_cast( pExecutableInfo ), + pStatisticCount, + reinterpret_cast( pStatistics ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d ) const { - std::vector statistics; - uint32_t statisticCount; - Result result; + std::vector statistics; + uint32_t statisticCount; + Result result; do { - result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, nullptr ) ); + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + nullptr ) ); if ( ( result == Result::eSuccess ) && statisticCount ) { statistics.resize( statisticCount ); - result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, reinterpret_cast( statistics.data() ) ) ); + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -72022,21 +83311,33 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); statistics.resize( statisticCount ); } - return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" ); + return createResultValue( + result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector statistics( vectorAllocator ); - uint32_t statisticCount; - Result result; + std::vector statistics( vectorAllocator ); + uint32_t statisticCount; + Result result; do { - result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, nullptr ) ); + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + nullptr ) ); if ( ( result == Result::eSuccess ) && statisticCount ) { statistics.resize( statisticCount ); - result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, reinterpret_cast( statistics.data() ) ) ); + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -72044,179 
+83345,324 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); statistics.resize( statisticCount ); } - return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" ); + return createResultValue( + result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast( stride ), static_cast( flags ) ) ); + return static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + dataSize, + pData, + static_cast( stride ), + static_cast( flags ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const + template + VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + ArrayProxy data, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const { - Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ), static_cast( stride ), static_cast( flags ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); + Result result = static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ), + static_cast( stride ), + static_cast( flags ) ) ); + return createResultValue( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, 
dataSize, pData ) );
+    return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
   }

-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename T, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d ) const
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+                                                             uint32_t firstGroup,
+                                                             uint32_t groupCount,
+                                                             ArrayProxy<T> data,
+                                                             Dispatch const & d ) const
   {
-    Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+    Result result = static_cast<Result>(
+      d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
+                                                           static_cast<VkPipeline>( pipeline ),
+                                                           firstGroup,
+                                                           groupCount,
+                                                           data.size() * sizeof( T ),
+                                                           reinterpret_cast<void *>( data.data() ) ) );
+    return createResultValue( result,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/

-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+                                                                       uint32_t firstGroup,
+                                                                       uint32_t groupCount,
+                                                                       size_t dataSize,
+                                                                       void * pData,
+                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
+      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename T, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d ) const
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+                                                uint32_t firstGroup,
+                                                uint32_t groupCount,
+                                                ArrayProxy<T> data,
+                                                Dispatch const & d ) const
   {
-    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesKHR" );
+    Result result =
+      static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
+                                                                   static_cast<VkPipeline>( pipeline ),
+                                                                   firstGroup,
+                                                                   groupCount,
+                                                                   data.size() * sizeof( T ),
+                                                                   reinterpret_cast<void *>( data.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
   }
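The ArrayProxy overloads above derive the byte count from the proxy itself ( data.size() * sizeof( T ) ), so a caller only has to hand in a correctly sized buffer; createResultValue then throws on failure, or returns the Result when exceptions are disabled. A minimal usage sketch, assuming a vk::Device device, a ray-tracing vk::Pipeline pipeline, and a groupCount and handleSize obtained elsewhere from the pipeline setup and the device's ray-tracing properties, with a default dispatcher that has the extension's entry points loaded (these names are illustrative and not taken from this patch); it uses the VK_NV_ray_tracing variant, whose wrapper directly follows and has the identical shape:

    // Illustrative only: read back all shader-group handles of a ray-tracing
    // pipeline through the ArrayProxy overload generated above.
    std::vector<uint8_t> handles( static_cast<size_t>( groupCount ) * handleSize );
    device.getRayTracingShaderGroupHandlesNV( pipeline, 0, groupCount, vk::ArrayProxy<uint8_t>( handles ) );
    // With exceptions enabled the call throws a vk::SystemError on failure;
    // with VULKAN_HPP_NO_EXCEPTIONS defined it returns the vk::Result instead.
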
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( + m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy data, + Dispatch const & d ) const { - Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" ); + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( pDisplayTimingProperties ) ) ); + return static_cast( d.vkGetRefreshCycleDurationGOOGLE( + m_device, + static_cast( swapchain ), + reinterpret_cast( pDisplayTimingProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; - Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( &displayTimingProperties ) ) ); - return createResultValue( result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getRefreshCycleDurationGOOGLE" ); + 
Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( + m_device, + static_cast( swapchain ), + reinterpret_cast( &displayTimingProperties ) ) ); + return createResultValue( + result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( pGranularity ) ); + d.vkGetRenderAreaGranularity( + m_device, static_cast( renderPass ), reinterpret_cast( pGranularity ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::Extent2D granularity; - d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( &granularity ) ); + d.vkGetRenderAreaGranularity( + m_device, static_cast( renderPass ), reinterpret_cast( &granularity ) ); return granularity; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetSemaphoreCounterValue( m_device, static_cast( semaphore ), pValue ) ); + return static_cast( + d.vkGetSemaphoreCounterValue( m_device, static_cast( semaphore ), pValue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const { uint64_t value; - Result result = static_cast( d.vkGetSemaphoreCounterValue( m_device, static_cast( semaphore ), &value ) ); - return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreCounterValue" ); + Result result = + static_cast( d.vkGetSemaphoreCounterValue( m_device, static_cast( semaphore ), &value ) ); + return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return 
static_cast( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast( semaphore ), pValue ) ); + return static_cast( + d.vkGetSemaphoreCounterValueKHR( m_device, static_cast( semaphore ), pValue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const { uint64_t value; - Result result = static_cast( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast( semaphore ), &value ) ); - return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreCounterValueKHR" ); + Result result = + static_cast( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast( semaphore ), &value ) ); + return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + return static_cast( + d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const { - int fd; - Result result = static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); - return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreFdKHR" ); + int fd; + Result result = static_cast( + d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + return static_cast( d.vkGetSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename 
ResultValueType::type + Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d ) const { HANDLE handle; - Result result = static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); - return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreWin32HandleKHR" ); + Result result = static_cast( d.vkGetSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), pInfoSize, pInfo ) ); + return static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + pInfoSize, + pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Dispatch const & d ) const { - std::vector info; - size_t infoSize; - Result result; + std::vector info; + size_t infoSize; + Result result; do { - result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, nullptr ) ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); if ( ( result == Result::eSuccess ) && infoSize ) { info.resize( infoSize ); - result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, reinterpret_cast( info.data() ) ) ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -72224,21 +83670,36 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( infoSize <= info.size() ); info.resize( infoSize ); } - return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" ); + return createResultValue( result, info, 
VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector info( vectorAllocator ); - size_t infoSize; - Result result; + std::vector info( vectorAllocator ); + size_t infoSize; + Result result; do { - result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, nullptr ) ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); if ( ( result == Result::eSuccess ) && infoSize ) { info.resize( infoSize ); - result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, reinterpret_cast( info.data() ) ) ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -72246,44 +83707,69 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( infoSize <= info.size() ); info.resize( infoSize ); } - return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" ); + return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); + return static_cast( d.vkGetSwapchainCounterEXT( m_device, + static_cast( swapchain ), + static_cast( counter ), + pCounterValue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + Dispatch const & d ) const { uint64_t counterValue; - Result result = static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), &counterValue ) ); - return createResultValue( result, counterValue, 
VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainCounterEXT" ); + Result result = + static_cast( d.vkGetSwapchainCounterEXT( m_device, + static_cast( swapchain ), + static_cast( counter ), + &counterValue ) ); + return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), pSwapchainImageCount, reinterpret_cast( pSwapchainImages ) ) ); + return static_cast( d.vkGetSwapchainImagesKHR( m_device, + static_cast( swapchain ), + pSwapchainImageCount, + reinterpret_cast( pSwapchainImages ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const { - std::vector swapchainImages; - uint32_t swapchainImageCount; - Result result; + std::vector swapchainImages; + uint32_t swapchainImageCount; + Result result; do { - result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + result = static_cast( d.vkGetSwapchainImagesKHR( + m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); if ( ( result == Result::eSuccess ) && swapchainImageCount ) { swapchainImages.resize( swapchainImageCount ); - result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); + result = + static_cast( d.vkGetSwapchainImagesKHR( m_device, + static_cast( swapchain ), + &swapchainImageCount, + reinterpret_cast( swapchainImages.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -72291,21 +83777,27 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); swapchainImages.resize( swapchainImageCount ); } - return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" ); + return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector swapchainImages( vectorAllocator ); - uint32_t swapchainImageCount; - Result result; + std::vector swapchainImages( vectorAllocator ); + uint32_t swapchainImageCount; + Result result; do { - result = static_cast( d.vkGetSwapchainImagesKHR( m_device, 
static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + result = static_cast( d.vkGetSwapchainImagesKHR( + m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); if ( ( result == Result::eSuccess ) && swapchainImageCount ) { swapchainImages.resize( swapchainImageCount ); - result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); + result = + static_cast( d.vkGetSwapchainImagesKHR( m_device, + static_cast( swapchain ), + &swapchainImageCount, + reinterpret_cast( swapchainImages.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -72313,44 +83805,60 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); swapchainImages.resize( swapchainImageCount ); } - return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" ); + return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); } #else - template - VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + template + VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d ) const { - Result result = static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); + Result result = + static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", + { Result::eSuccess, Result::eSuboptimalKHR } ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), pDataSize, pData ) ); + return static_cast( d.vkGetValidationCacheDataEXT( + m_device, static_cast( validationCache ), pDataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Dispatch const & d ) const { - std::vector data; - size_t 
dataSize; - Result result; + std::vector data; + size_t dataSize; + Result result; do { - result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + result = static_cast( d.vkGetValidationCacheDataEXT( + m_device, static_cast( validationCache ), &dataSize, nullptr ) ); if ( ( result == Result::eSuccess ) && dataSize ) { data.resize( dataSize ); - result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + result = + static_cast( d.vkGetValidationCacheDataEXT( m_device, + static_cast( validationCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -72358,21 +83866,29 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( dataSize <= data.size() ); data.resize( dataSize ); } - return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector data( vectorAllocator ); - size_t dataSize; - Result result; + std::vector data( vectorAllocator ); + size_t dataSize; + Result result; do { - result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + result = static_cast( d.vkGetValidationCacheDataEXT( + m_device, static_cast( validationCache ), &dataSize, nullptr ) ); if ( ( result == Result::eSuccess ) && dataSize ) { data.resize( dataSize ); - result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + result = + static_cast( d.vkGetValidationCacheDataEXT( m_device, + static_cast( validationCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -72380,1130 +83896,1897 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( dataSize <= data.size() ); data.resize( dataSize ); } - return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); + return static_cast( + d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::importFenceFdKHR( const 
ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" ); + Result result = static_cast( + d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); + return static_cast( d.vkImportFenceWin32HandleKHR( + m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, + Dispatch const & d ) const { - Result result = static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" ); + Result result = static_cast( d.vkImportFenceWin32HandleKHR( + m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); + return static_cast( d.vkImportSemaphoreFdKHR( + m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkImportSemaphoreFdKHR( m_device, 
reinterpret_cast( &importSemaphoreFdInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" ); + Result result = static_cast( d.vkImportSemaphoreFdKHR( + m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); + return static_cast( d.vkImportSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, + Dispatch const & d ) const { - Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" ); + Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( + const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( pInitializeInfo ) ) ); + return static_cast( d.vkInitializePerformanceApiINTEL( + m_device, reinterpret_cast( pInitializeInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, + Dispatch const & d ) const { - Result result = static_cast( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( &initializeInfo ) ) ); - return createResultValue( result, 
VULKAN_HPP_NAMESPACE_STRING"::Device::initializePerformanceApiINTEL" ); + Result result = static_cast( d.vkInitializePerformanceApiINTEL( + m_device, reinterpret_cast( &initializeInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + return static_cast( d.vkInvalidateMappedMemoryRanges( + m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::invalidateMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::invalidateMappedMemoryRanges( ArrayProxy memoryRanges, + Dispatch const & d ) const { - Result result = static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast( memoryRanges.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::invalidateMappedMemoryRanges" ); + Result result = static_cast( d.vkInvalidateMappedMemoryRanges( + m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkMapMemory( m_device, static_cast( memory ), static_cast( offset ), static_cast( size ), static_cast( flags ), ppData ) ); + return static_cast( d.vkMapMemory( m_device, + static_cast( memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + ppData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + Dispatch const & d ) const { - void* pData; - Result result = static_cast( d.vkMapMemory( m_device, static_cast( memory ), static_cast( offset ), static_cast( size ), 
static_cast( flags ), &pData ) ); - return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING"::Device::mapMemory" ); + void * pData; + Result result = static_cast( d.vkMapMemory( m_device, + static_cast( memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + &pData ) ); + return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); + return static_cast( d.vkMergePipelineCaches( m_device, + static_cast( dstCache ), + srcCacheCount, + reinterpret_cast( pSrcCaches ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy srcCaches, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + ArrayProxy srcCaches, + Dispatch const & d ) const { - Result result = static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCaches.size() , reinterpret_cast( srcCaches.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergePipelineCaches" ); + Result result = + static_cast( d.vkMergePipelineCaches( m_device, + static_cast( dstCache ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkMergeValidationCachesEXT( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); + return static_cast( + d.vkMergeValidationCachesEXT( m_device, + static_cast( dstCache ), + srcCacheCount, + reinterpret_cast( pSrcCaches ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy srcCaches, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + ArrayProxy srcCaches, + Dispatch const & d ) const { - Result result = static_cast( d.vkMergeValidationCachesEXT( m_device, static_cast( dstCache ), srcCaches.size() , 
reinterpret_cast( srcCaches.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergeValidationCachesEXT" ); + Result result = static_cast( + d.vkMergeValidationCachesEXT( m_device, + static_cast( dstCache ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( pDeviceEventInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); + return static_cast( + d.vkRegisterDeviceEventEXT( m_device, + reinterpret_cast( pDeviceEventInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Fence fence; - Result result = static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( &deviceEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXT" ); + Result result = static_cast( d.vkRegisterDeviceEventEXT( + m_device, + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Fence fence; - Result result = static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( &deviceEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + Result result = static_cast( d.vkRegisterDeviceEventEXT( + m_device, + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXTUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, fence, VULKAN_HPP_NAMESPACE_STRING 
"::Device::registerEventEXTUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( pDisplayEventInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); + return static_cast( + d.vkRegisterDisplayEventEXT( m_device, + static_cast( display ), + reinterpret_cast( pDisplayEventInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayEventInfoEXT & displayEventInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Fence fence; - Result result = static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( &displayEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXT" ); + Result result = static_cast( d.vkRegisterDisplayEventEXT( + m_device, + static_cast( display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayEventInfoEXT & displayEventInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Fence fence; - Result result = static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( &displayEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + Result result = static_cast( d.vkRegisterDisplayEventEXT( + m_device, + static_cast( display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return 
createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXTUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + } +# else + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + Result result = static_cast( + d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + return static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const { - Result result = static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releaseFullScreenExclusiveModeEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); - } -#else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const - { - Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releasePerformanceConfigurationINTEL" ); + Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( 
configuration ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkReleaseProfilingLockKHR( m_device ); } #else - template - VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkReleaseProfilingLockKHR( m_device ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); + return static_cast( d.vkResetCommandPool( + m_device, static_cast( commandPool ), static_cast( flags ) ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d ) const { - Result result = static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetCommandPool" ); + Result result = static_cast( d.vkResetCommandPool( + m_device, static_cast( commandPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + return static_cast( d.vkResetDescriptorPool( + m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + 
VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d ) const { - Result result = static_cast( d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetDescriptorPool" ); + Result result = static_cast( d.vkResetDescriptorPool( + m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const { Result result = static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetEvent" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkResetFences( m_device, fenceCount, reinterpret_cast( pFences ) ) ); + return static_cast( d.vkResetFences( m_device, fenceCount, reinterpret_cast( pFences ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::resetFences( ArrayProxy fences, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetFences( ArrayProxy fences, Dispatch const & d ) const { - Result result = static_cast( d.vkResetFences( m_device, fences.size() , reinterpret_cast( fences.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetFences" ); + Result result = static_cast( + d.vkResetFences( m_device, fences.size(), reinterpret_cast( fences.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkResetQueryPool( m_device, static_cast( queryPool ), firstQuery, queryCount ); } #else - template - VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) 
const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkResetQueryPool( m_device, static_cast( queryPool ), firstQuery, queryCount ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); } #else - template - VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); + return static_cast( d.vkSetDebugUtilsObjectNameEXT( + m_device, reinterpret_cast( pNameInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" ); + Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( + m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + return static_cast( + d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type 
Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" ); + Result result = static_cast( + d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const { Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast( pSwapchains ), reinterpret_cast( pMetadata ) ); + d.vkSetHdrMetadataEXT( m_device, + swapchainCount, + reinterpret_cast( pSwapchains ), + reinterpret_cast( pMetadata ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy swapchains, ArrayProxy metadata, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy swapchains, + ArrayProxy metadata, + Dispatch const & d ) const { -#ifdef VULKAN_HPP_NO_EXCEPTIONS +# ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); -#else +# else if ( swapchains.size() != metadata.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::VkDevice::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkSetHdrMetadataEXT( m_device, swapchains.size() , reinterpret_cast( swapchains.data() ), reinterpret_cast( metadata.data() ) ); +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkSetHdrMetadataEXT( m_device, + swapchains.size(), + reinterpret_cast( swapchains.data() ), + reinterpret_cast( 
metadata.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkSetLocalDimmingAMD( m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); + d.vkSetLocalDimmingAMD( + m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); } #else - template - VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkSetLocalDimmingAMD( m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); + d.vkSetLocalDimmingAMD( + m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( pSignalInfo ) ) ); + return static_cast( + d.vkSignalSemaphore( m_device, reinterpret_cast( pSignalInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( &signalInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphore" ); + Result result = static_cast( + d.vkSignalSemaphore( m_device, reinterpret_cast( &signalInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( pSignalInfo ) ) ); + return static_cast( + d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( pSignalInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::signalSemaphoreKHR( const 
SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( &signalInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphoreKHR" ); + Result result = static_cast( + d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( &signalInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkTrimCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ); + d.vkTrimCommandPool( + m_device, static_cast( commandPool ), static_cast( flags ) ); } #else - template - VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkTrimCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ); + d.vkTrimCommandPool( + m_device, static_cast( commandPool ), static_cast( flags ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkTrimCommandPoolKHR( m_device, static_cast( commandPool ), static_cast( flags ) ); + d.vkTrimCommandPoolKHR( + m_device, static_cast( commandPool ), static_cast( flags ) ); } #else - template - VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkTrimCommandPoolKHR( m_device, static_cast( commandPool ), static_cast( flags ) ); + d.vkTrimCommandPoolKHR( + m_device, static_cast( commandPool ), static_cast( flags ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkUninitializePerformanceApiINTEL( m_device ); } #else - template - VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + 
VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkUninitializePerformanceApiINTEL( m_device ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkUnmapMemory( m_device, static_cast( memory ) ); } #else - template - VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkUnmapMemory( m_device, static_cast( memory ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + d.vkUpdateDescriptorSetWithTemplate( m_device, + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + pData ); } #else - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + d.vkUpdateDescriptorSetWithTemplate( m_device, + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + pData ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + d.vkUpdateDescriptorSetWithTemplateKHR( m_device, + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate 
), + pData ); } #else - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + d.vkUpdateDescriptorSetWithTemplateKHR( m_device, + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + pData ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSets( uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast( pDescriptorCopies ) ); + d.vkUpdateDescriptorSets( m_device, + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ), + descriptorCopyCount, + reinterpret_cast( pDescriptorCopies ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy descriptorWrites, ArrayProxy descriptorCopies, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSets( ArrayProxy descriptorWrites, + ArrayProxy descriptorCopies, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast( descriptorCopies.data() ) ); + d.vkUpdateDescriptorSets( m_device, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ), + descriptorCopies.size(), + reinterpret_cast( descriptorCopies.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); + return static_cast( d.vkWaitForFences( + m_device, fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t 
timeout, Dispatch const &d ) const + template + VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d ) const { - Result result = static_cast( d.vkWaitForFences( m_device, fences.size() , reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitForFences", { Result::eSuccess, Result::eTimeout } ); + Result result = static_cast( d.vkWaitForFences( m_device, + fences.size(), + reinterpret_cast( fences.data() ), + static_cast( waitAll ), + timeout ) ); + return createResultValue( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { Result::eSuccess, Result::eTimeout } ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkWaitSemaphores( m_device, reinterpret_cast( pWaitInfo ), timeout ) ); + return static_cast( + d.vkWaitSemaphores( m_device, reinterpret_cast( pWaitInfo ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const + template + VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d ) const { - Result result = static_cast( d.vkWaitSemaphores( m_device, reinterpret_cast( &waitInfo ), timeout ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitSemaphores", { Result::eSuccess, Result::eTimeout } ); + Result result = static_cast( + d.vkWaitSemaphores( m_device, reinterpret_cast( &waitInfo ), timeout ) ); + return createResultValue( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { Result::eSuccess, Result::eTimeout } ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( pWaitInfo ), timeout ) ); + return static_cast( + d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( pWaitInfo ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const + template + VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d ) const { - Result result = static_cast( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( &waitInfo ), timeout ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitSemaphoresKHR", { Result::eSuccess, Result::eTimeout } ); + Result result = static_cast( + d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( &waitInfo ), timeout ) ); + return createResultValue( 
+ result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { Result::eSuccess, Result::eTimeout } ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), dataSize, pData, stride ) ); + return static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + dataSize, + pData, + stride ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::writeAccelerationStructuresPropertiesKHR( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy data, size_t stride, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::writeAccelerationStructuresPropertiesKHR( + ArrayProxy accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + ArrayProxy data, + size_t stride, + Dispatch const & d ) const { - Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ), stride ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::writeAccelerationStructuresPropertiesKHR" ); + Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ), + stride ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR - template - VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateAndroidSurfaceKHR( 
m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( + d.vkCreateAndroidSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHR" ); + Result result = static_cast( d.vkCreateAndroidSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateAndroidSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHRUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template - VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCallback ) ) ); + return static_cast( + d.vkCreateDebugReportCallbackEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCallback ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; - Result result = static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); - return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXT" ); + Result result = static_cast( d.vkCreateDebugReportCallbackEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &callback ) ) ); + return createResultValue( + result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; - Result result = static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); + Result result = static_cast( d.vkCreateDebugReportCallbackEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &callback ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXTUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMessenger ) ) ); + return static_cast( + d.vkCreateDebugUtilsMessengerEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMessenger ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; - Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &messenger ) ) ); - return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXT" ); + Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); + return createResultValue( + result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; - Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &messenger ) ) ); + Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXTUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( + d.vkCreateDisplayPlaneSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHR" ); + Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHRUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( + d.vkCreateHeadlessSurfaceEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createHeadlessSurfaceEXT" ); + Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createHeadlessSurfaceEXTUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_IOS_MVK - template - VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVK" ); + Result result = static_cast( d.vkCreateIOSSurfaceMVK( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateIOSSurfaceMVK( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVKUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_IOS_MVK*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ #ifdef VK_USE_PLATFORM_FUCHSIA - template - VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( + d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIA" ); + Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ #ifdef VK_USE_PLATFORM_MACOS_MVK - template - VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + 
Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVK" ); + Result result = static_cast( d.vkCreateMacOSSurfaceMVK( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateMacOSSurfaceMVK( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVKUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ #ifdef VK_USE_PLATFORM_METAL_EXT - template - VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + 
Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMetalSurfaceEXT" ); + Result result = static_cast( d.vkCreateMetalSurfaceEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateMetalSurfaceEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMetalSurfaceEXTUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_METAL_EXT*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ #ifdef VK_USE_PLATFORM_GGP - template - VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + 
VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( + const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createStreamDescriptorSurfaceGGP" ); + Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createStreamDescriptorSurfaceGGPUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_GGP*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_GGP*/ #ifdef VK_USE_PLATFORM_VI_NN - template - VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const 
VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Instance::createViSurfaceNN( + const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNN" ); + Result result = static_cast( d.vkCreateViSurfaceNN( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateViSurfaceNN( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNNUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_VI_NN*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_VI_NN*/ #ifdef VK_USE_PLATFORM_WAYLAND_KHR - template - VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( + d.vkCreateWaylandSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHR" ); + Result result = static_cast( d.vkCreateWaylandSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateWaylandSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHRUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const 
VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( + d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHR" ); + Result result = static_cast( d.vkCreateWin32SurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateWin32SurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHRUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_XCB_KHR - template - VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const 
VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHR" ); + Result result = static_cast( d.vkCreateXcbSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateXcbSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHRUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XCB_KHR*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ #ifdef VK_USE_PLATFORM_XLIB_KHR - template - VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* 
pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + return static_cast( + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHR" ); + Result result = static_cast( d.vkCreateXlibSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + Result result = static_cast( d.vkCreateXlibSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHRUnique", deleter ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - template - VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t 
object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDebugReportMessageEXT( m_instance, static_cast( flags ), static_cast( objectType ), object, location, messageCode, pLayerPrefix, pMessage ); + d.vkDebugReportMessageEXT( m_instance, + static_cast( flags ), + static_cast( objectType ), + object, + location, + messageCode, + pLayerPrefix, + pMessage ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d ) const { -#ifdef VULKAN_HPP_NO_EXCEPTIONS +# ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( layerPrefix.size() == message.size() ); -#else +# else if ( layerPrefix.size() != message.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkInstance::debugReportMessageEXT: layerPrefix.size() != message.size()" ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::VkInstance::debugReportMessageEXT: layerPrefix.size() != message.size()" ); } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkDebugReportMessageEXT( m_instance, static_cast( flags ), static_cast( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() ); +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkDebugReportMessageEXT( m_instance, + static_cast( flags ), + static_cast( objectType ), + object, + location, + messageCode, + layerPrefix.c_str(), + message.c_str() ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDebugReportCallbackEXT( m_instance, + static_cast( callback ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - 
d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDebugReportCallbackEXT( + m_instance, + static_cast( callback ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDebugReportCallbackEXT( m_instance, + static_cast( callback ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDebugReportCallbackEXT( + m_instance, + static_cast( callback ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDebugUtilsMessengerEXT( m_instance, + static_cast( messenger ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { 
- d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDebugUtilsMessengerEXT( m_instance, + static_cast( messenger ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); + d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyInstance( m_instance, reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyInstance( + m_instance, + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + d.vkDestroySurfaceKHR( + m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySurfaceKHR( + m_instance, + static_cast( surface ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + d.vkDestroySurfaceKHR( + m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySurfaceKHR( + m_instance, + static_cast( surface ), + reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + return static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const { - std::vector physicalDeviceGroupProperties; - uint32_t physicalDeviceGroupCount; - Result result; + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; do { - result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + result = + static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73511,21 +85794,27 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); } - return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" ); + return createResultValue( + result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type 
Instance::enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroups( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector physicalDeviceGroupProperties( vectorAllocator ); - uint32_t physicalDeviceGroupCount; - Result result; + std::vector physicalDeviceGroupProperties( vectorAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; do { - result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + result = + static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73533,29 +85822,41 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); } - return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" ); + return createResultValue( + result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const { - std::vector physicalDeviceGroupProperties; - uint32_t physicalDeviceGroupCount; - Result result; + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; do { - result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + result = + static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = static_cast( 
d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73563,21 +85864,28 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); } - return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" ); + return createResultValue( result, + physicalDeviceGroupProperties, + VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroupsKHR( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector physicalDeviceGroupProperties( vectorAllocator ); - uint32_t physicalDeviceGroupCount; - Result result; + std::vector physicalDeviceGroupProperties( vectorAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; do { - result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + result = + static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73585,29 +85893,36 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); } - return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" ); + return createResultValue( result, + physicalDeviceGroupProperties, + VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, + VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); + return static_cast( d.vkEnumeratePhysicalDevices( + m_instance, pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - 
VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::enumeratePhysicalDevices( Dispatch const & d ) const { - std::vector physicalDevices; - uint32_t physicalDeviceCount; - Result result; + std::vector physicalDevices; + uint32_t physicalDeviceCount; + Result result; do { result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); if ( ( result == Result::eSuccess ) && physicalDeviceCount ) { physicalDevices.resize( physicalDeviceCount ); - result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + result = static_cast( d.vkEnumeratePhysicalDevices( + m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73615,21 +85930,24 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); physicalDevices.resize( physicalDeviceCount ); } - return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" ); + return createResultValue( + result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::enumeratePhysicalDevices( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector physicalDevices( vectorAllocator ); - uint32_t physicalDeviceCount; - Result result; + std::vector physicalDevices( vectorAllocator ); + uint32_t physicalDeviceCount; + Result result; do { result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); if ( ( result == Result::eSuccess ) && physicalDeviceCount ) { physicalDevices.resize( physicalDeviceCount ); - result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + result = static_cast( d.vkEnumeratePhysicalDevices( + m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73637,113 +85955,183 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); physicalDevices.resize( physicalDeviceCount ); } - return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" ); + return createResultValue( + result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return d.vkGetInstanceProcAddr( m_instance, pName ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const 
std::string & name, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return d.vkGetInstanceProcAddr( m_instance, name.c_str() ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast( messageSeverity ), static_cast( messageTypes ), reinterpret_cast( pCallbackData ) ); + d.vkSubmitDebugUtilsMessageEXT( m_instance, + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( pCallbackData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const DebugUtilsMessengerCallbackDataEXT & callbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast( messageSeverity ), static_cast( messageTypes ), reinterpret_cast( &callbackData ) ); + d.vkSubmitDebugUtilsMessageEXT( m_instance, + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( &callbackData ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - template - VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast( display ) ) ); + return static_cast( + d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast( display ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const { Display dpy; - Result result = static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast( display ) ) ); - return createResultValue( result, dpy, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" ); + Result result = + static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, 
static_cast( display ) ) ); + return createResultValue( result, dpy, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Device * pDevice, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDevice ) ) ); + return static_cast( d.vkCreateDevice( m_physicalDevice, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDevice ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::createDevice( + const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Device device; - Result result = static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); - return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDevice" ); + Result result = static_cast( d.vkCreateDevice( + m_physicalDevice, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &device ) ) ); + return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::createDeviceUnique( + const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Device device; - Result result = static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); + Result result = static_cast( d.vkCreateDevice( + m_physicalDevice, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &device ) ) ); - ObjectDestroy deleter( allocator, d ); - return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDeviceUnique", deleter ); + ObjectDestroy deleter( allocator, d ); + return createResultValue( + result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique", deleter ); } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast( display ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMode ) ) ); + return static_cast( + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMode ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayModeCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; - Result result = static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast( display ), reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &mode ) ) ); - return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDisplayModeKHR" ); + Result result = static_cast( d.vkCreateDisplayModeKHR( + m_physicalDevice, + static_cast( display ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &mode ) ) ); + return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, + Dispatch const & d ) const { - std::vector 
properties; - uint32_t propertyCount; - Result result; + std::vector properties; + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + result = static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( + d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, + layerName ? layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73751,21 +86139,30 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + result = static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( + d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, + layerName ? 
layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73773,29 +86170,36 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkEnumerateDeviceLayerProperties( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const { - std::vector properties; - uint32_t propertyCount; - Result result; + std::vector properties; + uint32_t propertyCount; + Result result; do { result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkEnumerateDeviceLayerProperties( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73803,21 +86207,24 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::enumerateDeviceLayerProperties( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; do { result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateDeviceLayerProperties( 
m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkEnumerateDeviceLayerProperties( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73825,29 +86232,54 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast( pCounters ), reinterpret_cast( pCounterDescriptions ) ) ); + return static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + pCounterCount, + reinterpret_cast( pCounters ), + reinterpret_cast( pCounterDescriptions ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy counters, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + ArrayProxy counters, + Dispatch const & d ) const { - std::vector counterDescriptions; - uint32_t counterCount; - Result result; + std::vector counterDescriptions; + uint32_t counterCount; + Result result; do { - result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, counters.size() , reinterpret_cast( counters.data() ), nullptr ) ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + counters.size(), + reinterpret_cast( counters.data() ), + nullptr ) ); if ( ( result == Result::eSuccess ) && counterCount ) { counterDescriptions.resize( counterCount ); - result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, counters.size() , reinterpret_cast( counters.data() ), reinterpret_cast( counterDescriptions.data() ) ) ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + counters.size(), + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) 
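
The generated wrappers in the hunks above all follow the same two-call enumeration idiom: query the element count with a null data pointer, size a std::vector, fetch the data, and retry while the driver returns vk::Result::eIncomplete. The enhanced-mode overloads run that loop inside vulkan.hpp and return the vector; the pointer-based overloads leave it to the caller. The stand-alone sketch below is not part of the patch and only illustrates that split; the application name and API version are arbitrary, and a working Vulkan loader is assumed.

// Hypothetical stand-alone example, not taken from the patch: contrasts the
// enhanced-mode wrapper (vector return, exceptions) with the pointer-based
// overload whose reformatted definition appears in the hunks above.
#include <vulkan/vulkan.hpp>

#include <iostream>
#include <vector>

int main()
{
  vk::ApplicationInfo    appInfo( "EnumerateSketch", 1, "NoEngine", 1, VK_API_VERSION_1_1 );
  vk::InstanceCreateInfo createInfo( {}, &appInfo );
  vk::Instance           instance = vk::createInstance( createInfo );  // throws vk::SystemError on failure

  // Enhanced mode: the count/resize/eIncomplete loop runs inside vulkan.hpp.
  std::vector<vk::PhysicalDevice> devices = instance.enumeratePhysicalDevices();

  // Pointer overload: the application performs the count and fetch calls itself
  // (the generated wrapper additionally retries while vk::Result::eIncomplete is returned).
  uint32_t   count  = 0;
  vk::Result result = instance.enumeratePhysicalDevices( &count, nullptr );
  std::vector<vk::PhysicalDevice> devicesAgain( count );
  if ( ( result == vk::Result::eSuccess ) && ( count > 0 ) )
  {
    result = instance.enumeratePhysicalDevices( &count, devicesAgain.data() );
  }

  std::cout << "physical devices: " << devices.size() << "\n";
  instance.destroy();
  return 0;
}

The repository samples (for example 02_EnumerateDevices) use the enhanced form throughout.
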
@@ -73855,21 +86287,39 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() ); counterDescriptions.resize( counterCount ); } - return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + return createResultValue( result, + counterDescriptions, + VULKAN_HPP_NAMESPACE_STRING + "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy counters, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + ArrayProxy counters, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector counterDescriptions( vectorAllocator ); - uint32_t counterCount; - Result result; + std::vector counterDescriptions( vectorAllocator ); + uint32_t counterCount; + Result result; do { - result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, counters.size() , reinterpret_cast( counters.data() ), nullptr ) ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + counters.size(), + reinterpret_cast( counters.data() ), + nullptr ) ); if ( ( result == Result::eSuccess ) && counterCount ) { counterDescriptions.resize( counterCount ); - result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, counters.size() , reinterpret_cast( counters.data() ), reinterpret_cast( counterDescriptions.data() ) ) ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + counters.size(), + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73877,29 +86327,46 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() ); counterDescriptions.resize( counterCount ); } - return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + return createResultValue( result, + counterDescriptions, + VULKAN_HPP_NAMESPACE_STRING + "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, + static_cast( display ), + 
pPropertyCount, + reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const { - std::vector properties; - uint32_t propertyCount; - Result result; + std::vector properties; + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73907,21 +86374,30 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73929,29 +86405,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); } 
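
Every convenience overload in these hunks is guarded by #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE (the smart-handle variants additionally by VULKAN_HPP_NO_SMART_HANDLE); the reformatting merely re-indents those guards to "# ifndef" when they are nested inside a platform guard. As a hypothetical illustration of what the guard controls (again, not part of the patch), the translation unit below defines the macro before including the header, so only the pointer/vk::Result based members remain available.

// Hypothetical translation unit, not taken from the patch: with the enhanced
// mode switched off, only the pointer/vk::Result based members that sit outside
// the VULKAN_HPP_DISABLE_ENHANCED_MODE guards above are compiled in.
#define VULKAN_HPP_DISABLE_ENHANCED_MODE
#include <vulkan/vulkan.hpp>

#include <cstdio>

int main()
{
  vk::InstanceCreateInfo createInfo;  // defaults: no layers, no extensions
  vk::Instance           instance;
  vk::Result             result = vk::createInstance( &createInfo, nullptr, &instance );
  if ( result != vk::Result::eSuccess )
  {
    std::printf( "vkCreateInstance failed: %d\n", static_cast<int>( result ) );
    return 1;
  }

  uint32_t deviceCount = 0;
  result = instance.enumeratePhysicalDevices( &deviceCount, nullptr );
  std::printf( "result = %d, physical devices = %u\n", static_cast<int>( result ), static_cast<unsigned>( deviceCount ) );

  instance.destroy( nullptr );
  return 0;
}

VULKAN_HPP_NO_EXCEPTIONS and VULKAN_HPP_NO_SMART_HANDLE are the other opt-out macros visible in these guards and work the same way.
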
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, + static_cast( display ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const { - std::vector properties; - uint32_t propertyCount; - Result result; + std::vector properties; + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + result = static_cast( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73959,21 +86450,30 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + result = static_cast( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - 
result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -73981,59 +86481,96 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast( pDisplayPlaneInfo ), reinterpret_cast( pCapabilities ) ) ); + return static_cast( + d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + reinterpret_cast( pDisplayPlaneInfo ), + reinterpret_cast( pCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; - Result result = static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast( &displayPlaneInfo ), reinterpret_cast( &capabilities ) ) ); - return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + Result result = static_cast( + d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + reinterpret_cast( &displayPlaneInfo ), + reinterpret_cast( &capabilities ) ) ); + return createResultValue( + result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( pCapabilities ) ) ); + return static_cast( + 
d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, + static_cast<VkDisplayModeKHR>( mode ), + planeIndex, + reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template<typename Dispatch> - VULKAN_HPP_INLINE typename ResultValueType<DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const + template <typename Dispatch> + VULKAN_HPP_INLINE typename ResultValueType<DisplayPlaneCapabilitiesKHR>::type + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; - Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( &capabilities ) ) ); - return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); + Result result = static_cast<Result>( + d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, + static_cast<VkDisplayModeKHR>( mode ), + planeIndex, + reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) ); + return createResultValue( + result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template<typename Dispatch> - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template <typename Dispatch> + VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + uint32_t * pDisplayCount, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR*>( pDisplays ) ) ); + return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( + m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template<typename Allocator, typename Dispatch> - VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const + template <typename Allocator, typename Dispatch> + VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, Allocator>>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const { - std::vector<DisplayKHR, Allocator> displays; - uint32_t displayCount; - Result result; + std::vector<DisplayKHR, Allocator> displays; + uint32_t displayCount; + Result result; do { - result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + result = static_cast<Result>( + d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); if ( ( result == Result::eSuccess ) && displayCount ) { displays.resize( displayCount ); - result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR*>( displays.data() ) ) ); + result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( + m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74041,21 +86578,27 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( displayCount <= displays.size() ); displays.resize( displayCount ); } - return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + return
createResultValue( + result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector displays( vectorAllocator ); - uint32_t displayCount; - Result result; + std::vector displays( vectorAllocator ); + uint32_t displayCount; + Result result; do { - result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + result = static_cast( + d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); if ( ( result == Result::eSuccess ) && displayCount ) { displays.resize( displayCount ); - result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( + m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74063,29 +86606,37 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( displayCount <= displays.size() ); displays.resize( displayCount ); } - return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + return createResultValue( + result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); + return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + m_physicalDevice, pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const { - std::vector timeDomains; - uint32_t timeDomainCount; - Result result; + std::vector timeDomains; + uint32_t timeDomainCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); if ( ( result == Result::eSuccess ) && timeDomainCount ) { timeDomains.resize( timeDomainCount ); - result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, 
reinterpret_cast( timeDomains.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74093,21 +86644,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); timeDomains.resize( timeDomainCount ); } - return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + return createResultValue( + result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector timeDomains( vectorAllocator ); - uint32_t timeDomainCount; - Result result; + std::vector timeDomains( vectorAllocator ); + uint32_t timeDomainCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); if ( ( result == Result::eSuccess ) && timeDomainCount ) { timeDomains.resize( timeDomainCount ); - result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74115,29 +86670,39 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); timeDomains.resize( timeDomainCount ); } - return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + return createResultValue( + result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const { - std::vector properties; - uint32_t 
propertyCount; - Result result; + std::vector properties; + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74145,21 +86710,27 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesNV( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74167,29 +86738,37 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, 
reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const { - std::vector properties; - uint32_t propertyCount; - Result result; + std::vector properties; + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74197,21 +86776,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74219,29 +86802,37 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( 
uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const { - std::vector properties; - uint32_t propertyCount; - Result result; + std::vector properties; + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74249,21 +86840,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74271,29 +86866,37 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount 
<= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayProperties2KHR(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const { - std::vector properties; - uint32_t propertyCount; - Result result; + std::vector properties; + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + result = + static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74301,21 +86904,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + result = + static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, 
&propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74323,29 +86930,37 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const { - std::vector properties; - uint32_t propertyCount; - Result result; + std::vector properties; + uint32_t propertyCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + result = + static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74353,21 +86968,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPropertiesKHR( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; do { - result = static_cast( 
d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + result = + static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); if ( ( result == Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74375,393 +86994,640 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); properties.resize( propertyCount ); } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast( pExternalBufferInfo ), reinterpret_cast( pExternalBufferProperties ) ); + d.vkGetPhysicalDeviceExternalBufferProperties( + m_physicalDevice, + reinterpret_cast( pExternalBufferInfo ), + reinterpret_cast( pExternalBufferProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; - d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast( &externalBufferInfo ), reinterpret_cast( &externalBufferProperties ) ); + d.vkGetPhysicalDeviceExternalBufferProperties( + m_physicalDevice, + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); return externalBufferProperties; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast( pExternalBufferInfo ), reinterpret_cast( pExternalBufferProperties ) ); + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( + m_physicalDevice, + reinterpret_cast( pExternalBufferInfo ), + reinterpret_cast( pExternalBufferProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; - d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast( &externalBufferInfo ), reinterpret_cast( &externalBufferProperties ) ); + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( + m_physicalDevice, + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); return externalBufferProperties; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast( pExternalFenceInfo ), reinterpret_cast( pExternalFenceProperties ) ); + d.vkGetPhysicalDeviceExternalFenceProperties( + m_physicalDevice, + reinterpret_cast( pExternalFenceInfo ), + reinterpret_cast( pExternalFenceProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; - d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast( &externalFenceInfo ), reinterpret_cast( &externalFenceProperties ) ); + d.vkGetPhysicalDeviceExternalFenceProperties( + m_physicalDevice, + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); return externalFenceProperties; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void 
PhysicalDevice::getExternalFencePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast( pExternalFenceInfo ), reinterpret_cast( pExternalFenceProperties ) ); + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( + m_physicalDevice, + reinterpret_cast( pExternalFenceInfo ), + reinterpret_cast( pExternalFenceProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; - d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast( &externalFenceInfo ), reinterpret_cast( &externalFenceProperties ) ); + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( + m_physicalDevice, + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); return externalFenceProperties; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), static_cast( externalHandleType ), reinterpret_cast( pExternalImageFormatProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( pExternalImageFormatProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags 
flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; - Result result = static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), static_cast( externalHandleType ), reinterpret_cast( &externalImageFormatProperties ) ) ); - return createResultValue( result, externalImageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + Result result = static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( &externalImageFormatProperties ) ) ); + return createResultValue( result, + externalImageFormatProperties, + VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast( pExternalSemaphoreInfo ), reinterpret_cast( pExternalSemaphoreProperties ) ); + d.vkGetPhysicalDeviceExternalSemaphoreProperties( + m_physicalDevice, + reinterpret_cast( pExternalSemaphoreInfo ), + reinterpret_cast( pExternalSemaphoreProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; - d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast( &externalSemaphoreInfo ), reinterpret_cast( &externalSemaphoreProperties ) ); + d.vkGetPhysicalDeviceExternalSemaphoreProperties( + m_physicalDevice, + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); return externalSemaphoreProperties; } 
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast( pExternalSemaphoreInfo ), reinterpret_cast( pExternalSemaphoreProperties ) ); + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + m_physicalDevice, + reinterpret_cast( pExternalSemaphoreInfo ), + reinterpret_cast( pExternalSemaphoreProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( + const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; - d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast( &externalSemaphoreInfo ), reinterpret_cast( &externalSemaphoreProperties ) ); + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + m_physicalDevice, + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); return externalSemaphoreProperties; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( pFeatures ) ); + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( pFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures + PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; - d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( &features ) ); + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( &features ) ); return features; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - 
d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( pFeatures ) ); + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( pFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; - d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); return features; } - template - VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFeatures2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2& features = structureChain.template get(); - d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = + structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( pFeatures ) ); + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( pFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; - d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); return features; } - template - VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2& features = structureChain.template get(); - d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = + structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* 
pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + d.vkGetPhysicalDeviceFormatProperties( + m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties + PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; - d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + d.vkGetPhysicalDeviceFormatProperties( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); return formatProperties; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + d.vkGetPhysicalDeviceFormatProperties2( + m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; - d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + d.vkGetPhysicalDeviceFormatProperties2( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); return formatProperties; } - template - VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::FormatProperties2& formatProperties = structureChain.template get(); - d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = + structureChain.template get(); + 
d.vkGetPhysicalDeviceFormatProperties2( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; - d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); return formatProperties; } - template - VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::FormatProperties2& formatProperties = structureChain.template get(); - d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type 
), static_cast( tiling ), static_cast( usage ), static_cast( flags ), reinterpret_cast( pImageFormatProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( pImageFormatProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; - Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), reinterpret_cast( &imageFormatProperties ) ) ); - return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties" ); + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast( pImageFormatInfo ), reinterpret_cast( pImageFormatProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( pImageFormatInfo ), + reinterpret_cast( pImageFormatProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; - Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), 
-    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
+      m_physicalDevice,
+      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
+    return createResultValue(
+      result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
   }
-  template<typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+    PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
+                                               Dispatch const & d ) const
   {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
-    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
+      structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
+      m_physicalDevice,
+      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
+    return createResultValue(
+      result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR(
+    const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
+      m_physicalDevice,
+      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
+      reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
+    PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
+                                                  Dispatch const & d ) const
   {
     VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
-    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
+      m_physicalDevice,
+      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
+    return createResultValue(
+      result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
   }
-  template<typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+    PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
+                                                  Dispatch const & d ) const
   {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
-    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
+      structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
+      m_physicalDevice,
+      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
+    return createResultValue(
+      result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
+                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( pMemoryProperties ) );
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice,
+                                           reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
+    PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
-    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( &memoryProperties ) );
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice,
+                                           reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
     return memoryProperties;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>(
pMemoryProperties ) ); + d.vkGetPhysicalDeviceMemoryProperties2( + m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; - d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + d.vkGetPhysicalDeviceMemoryProperties2( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); return memoryProperties; } - template - VULKAN_HPP_INLINE StructureChain PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2& memoryProperties = structureChain.template get(); - d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceMemoryProperties2( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + d.vkGetPhysicalDeviceMemoryProperties2KHR( + m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; - d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + d.vkGetPhysicalDeviceMemoryProperties2KHR( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); return memoryProperties; } - template - VULKAN_HPP_INLINE StructureChain PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2& memoryProperties = structureChain.template get(); - d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & 
memoryProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceMemoryProperties2KHR( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast( samples ), reinterpret_cast( pMultisampleProperties ) ); + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( + m_physicalDevice, + static_cast( samples ), + reinterpret_cast( pMultisampleProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; - d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast( samples ), reinterpret_cast( &multisampleProperties ) ); + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( + m_physicalDevice, + static_cast( samples ), + reinterpret_cast( &multisampleProperties ) ); return multisampleProperties; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pRectCount, + VULKAN_HPP_NAMESPACE::Rect2D * pRects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); + return static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { - std::vector rects; - uint32_t rectCount; - Result result; + std::vector rects; + uint32_t rectCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); if ( ( result == Result::eSuccess ) && rectCount ) { rects.resize( 
rectCount ); - result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ) ); + result = static_cast( + d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, + static_cast( surface ), + &rectCount, + reinterpret_cast( rects.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74769,21 +87635,29 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( rectCount <= rects.size() ); rects.resize( rectCount ); } - return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" ); + return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector rects( vectorAllocator ); - uint32_t rectCount; - Result result; + std::vector rects( vectorAllocator ); + uint32_t rectCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); if ( ( result == Result::eSuccess ) && rectCount ) { rects.resize( rectCount ); - result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ) ); + result = static_cast( + d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, + static_cast( surface ), + &rectCount, + reinterpret_cast( rects.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -74791,173 +87665,233 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( rectCount <= rects.size() ); rects.resize( rectCount ); } - return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" ); + return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( pProperties ) ); + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties + PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; - d.vkGetPhysicalDeviceProperties( m_physicalDevice, 
reinterpret_cast( &properties ) ); + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( &properties ) ); return properties; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( pProperties ) ); + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, + reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; - d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( &properties ) ); + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, + reinterpret_cast( &properties ) ); return properties; } - template - VULKAN_HPP_INLINE StructureChain PhysicalDevice::getProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2& properties = structureChain.template get(); - d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( &properties ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = + structureChain.template get(); + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, + reinterpret_cast( &properties ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( pProperties ) ); + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, + reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; - d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( &properties ) ); + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, + reinterpret_cast( &properties ) ); return properties; } - template - VULKAN_HPP_INLINE StructureChain PhysicalDevice::getProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - 
StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2& properties = structureChain.template get(); - d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( &properties ) ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = + structureChain.template get(); + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, + reinterpret_cast( &properties ) ); return structureChain; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast( pPerformanceQueryCreateInfo ), pNumPasses ); + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + m_physicalDevice, + reinterpret_cast( pPerformanceQueryCreateInfo ), + pNumPasses ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { uint32_t numPasses; - d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast( &performanceQueryCreateInfo ), &numPasses ); + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + m_physicalDevice, + reinterpret_cast( &performanceQueryCreateInfo ), + &numPasses ); return numPasses; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, + pQueueFamilyPropertyCount, + reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties(Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const { - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); queueFamilyProperties.resize( queueFamilyPropertyCount ); - d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, 
&queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); return queueFamilyProperties; } - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector queueFamilyProperties( vectorAllocator ); - uint32_t queueFamilyPropertyCount; + std::vector queueFamilyProperties( vectorAllocator ); + uint32_t queueFamilyPropertyCount; d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); queueFamilyProperties.resize( queueFamilyPropertyCount ); - d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); return queueFamilyProperties; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + pQueueFamilyPropertyCount, + reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const { - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); queueFamilyProperties.resize( queueFamilyPropertyCount ); - d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); return queueFamilyProperties; } - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector queueFamilyProperties( vectorAllocator ); - uint32_t queueFamilyPropertyCount; + std::vector queueFamilyProperties( vectorAllocator ); + uint32_t queueFamilyPropertyCount; d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); queueFamilyProperties.resize( 
queueFamilyPropertyCount ); - d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); return queueFamilyProperties; } - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const { - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); queueFamilyProperties.resize( queueFamilyPropertyCount ); std::vector localVector( queueFamilyPropertyCount ); - for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ ) + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) { - localVector[i].pNext = queueFamilyProperties[i].template get().pNext; + localVector[i].pNext = + queueFamilyProperties[i].template get().pNext; } - d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( localVector.data() ) ); - for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ ) + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( localVector.data() ) ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) { queueFamilyProperties[i].template get() = localVector[i]; } return queueFamilyProperties; } - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector queueFamilyProperties( vectorAllocator ); - uint32_t queueFamilyPropertyCount; + std::vector queueFamilyProperties( vectorAllocator ); + uint32_t queueFamilyPropertyCount; d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); queueFamilyProperties.resize( queueFamilyPropertyCount ); std::vector localVector( queueFamilyPropertyCount ); - for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ ) + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) { - localVector[i].pNext = queueFamilyProperties[i].template get().pNext; + localVector[i].pNext = + queueFamilyProperties[i].template get().pNext; } - d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( localVector.data() ) ); - for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ ) + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( localVector.data() ) ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) { queueFamilyProperties[i].template get() = localVector[i]; } @@ -74965,65 +87899,85 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * 
pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + pQueueFamilyPropertyCount, + reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const { - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); queueFamilyProperties.resize( queueFamilyPropertyCount ); - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); return queueFamilyProperties; } - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector queueFamilyProperties( vectorAllocator ); - uint32_t queueFamilyPropertyCount; + std::vector queueFamilyProperties( vectorAllocator ); + uint32_t queueFamilyPropertyCount; d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); queueFamilyProperties.resize( queueFamilyPropertyCount ); - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); return queueFamilyProperties; } - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const { - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); queueFamilyProperties.resize( queueFamilyPropertyCount ); std::vector localVector( queueFamilyPropertyCount ); - for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ ) + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) { - localVector[i].pNext = queueFamilyProperties[i].template get().pNext; + localVector[i].pNext = + queueFamilyProperties[i].template get().pNext; } - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( localVector.data() ) ); - for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ ) + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( localVector.data() ) ); + for ( 
uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) { queueFamilyProperties[i].template get() = localVector[i]; } return queueFamilyProperties; } - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector queueFamilyProperties( vectorAllocator ); - uint32_t queueFamilyPropertyCount; + std::vector queueFamilyProperties( vectorAllocator ); + uint32_t queueFamilyPropertyCount; d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); queueFamilyProperties.resize( queueFamilyPropertyCount ); std::vector localVector( queueFamilyPropertyCount ); - for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ ) + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) { - localVector[i].pNext = queueFamilyProperties[i].template get().pNext; + localVector[i].pNext = + queueFamilyProperties[i].template get().pNext; } - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( localVector.data() ) ); - for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ ) + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( localVector.data() ) ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) { queueFamilyProperties[i].template get() = localVector[i]; } @@ -75031,109 +87985,237 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), pPropertyCount, reinterpret_cast( pProperties ) ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + pPropertyCount, + reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format 
format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Dispatch const & d ) const { - std::vector properties; - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), &propertyCount, nullptr ); + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), &propertyCount, reinterpret_cast( properties.data() ) ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); return properties; } - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), &propertyCount, nullptr ); + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), &propertyCount, reinterpret_cast( properties.data() ) ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); return properties; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + 
VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( pFormatInfo ), pPropertyCount, reinterpret_cast( pProperties ) ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( pFormatInfo ), + pPropertyCount, + reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d ) const { - std::vector properties; - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); return properties; } - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); return properties; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, 
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( pFormatInfo ), pPropertyCount, reinterpret_cast( pProperties ) ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( pFormatInfo ), + pPropertyCount, + reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d ) const { - std::vector properties; - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); return properties; } - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); return properties; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( + uint32_t * pCombinationCount, + VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, 
pCombinationCount, reinterpret_cast( pCombinations ) ) ); + return static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, + pCombinationCount, + reinterpret_cast( pCombinations ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const { - std::vector combinations; - uint32_t combinationCount; - Result result; + std::vector combinations; + uint32_t combinationCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, &combinationCount, nullptr ) ); if ( ( result == Result::eSuccess ) && combinationCount ) { combinations.resize( combinationCount ); - result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast( combinations.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, + &combinationCount, + reinterpret_cast( combinations.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -75141,21 +88223,30 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); combinations.resize( combinationCount ); } - return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + return createResultValue( result, + combinations, + VULKAN_HPP_NAMESPACE_STRING + "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector combinations( vectorAllocator ); - uint32_t combinationCount; - Result result; + std::vector combinations( vectorAllocator ); + uint32_t combinationCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, &combinationCount, nullptr ) ); if ( ( result == Result::eSuccess ) && combinationCount ) { combinations.resize( combinationCount ); - result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast( combinations.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, + &combinationCount, + reinterpret_cast( combinations.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -75163,82 +88254,143 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( combinationCount <= 
combinations.size() ); combinations.resize( combinationCount ); } - return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + return createResultValue( result, + combinations, + VULKAN_HPP_NAMESPACE_STRING + "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; - Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); - return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2EXT" ); + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pSurfaceCapabilities ) ) ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( + m_physicalDevice, + reinterpret_cast( pSurfaceInfo ), + reinterpret_cast( pSurfaceCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + 
PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; - Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &surfaceCapabilities ) ) ); - return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" ); + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR& surfaceCapabilities = structureChain.template get(); - Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &surfaceCapabilities ) ) ); - return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = + structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; - Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) 
); - return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); + return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( pSurfaceInfo ), + pSurfaceFormatCount, + reinterpret_cast( pSurfaceFormats ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const { - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - Result result; + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + nullptr ) ); if ( ( result == Result::eSuccess ) && surfaceFormatCount ) { surfaceFormats.resize( surfaceFormatCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -75246,21 +88398,33 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); surfaceFormats.resize( surfaceFormatCount ); } - return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" ); + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename 
ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector surfaceFormats( vectorAllocator ); - uint32_t surfaceFormatCount; - Result result; + std::vector surfaceFormats( vectorAllocator ); + uint32_t surfaceFormatCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + nullptr ) ); if ( ( result == Result::eSuccess ) && surfaceFormatCount ) { surfaceFormats.resize( surfaceFormatCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -75268,29 +88432,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); surfaceFormats.resize( surfaceFormatCount ); } - return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" ); + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); + return static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, + static_cast( surface ), + pSurfaceFormatCount, + reinterpret_cast( pSurfaceFormats ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - Result result; + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); if ( ( result == Result::eSuccess ) && surfaceFormatCount ) { surfaceFormats.resize( surfaceFormatCount ); - result = static_cast( 
d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, + static_cast( surface ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -75298,21 +88477,30 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); surfaceFormats.resize( surfaceFormatCount ); } - return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" ); + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector surfaceFormats( vectorAllocator ); - uint32_t surfaceFormatCount; - Result result; + std::vector surfaceFormats( vectorAllocator ); + uint32_t surfaceFormatCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); if ( ( result == Result::eSuccess ) && surfaceFormatCount ) { surfaceFormats.resize( surfaceFormatCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, + static_cast( surface ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -75320,30 +88508,49 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); surfaceFormats.resize( surfaceFormatCount ); } - return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" ); + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); + return static_cast( 
d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( pSurfaceInfo ), + pPresentModeCount, + reinterpret_cast( pPresentModes ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const { - std::vector presentModes; - uint32_t presentModeCount; - Result result; + std::vector presentModes; + uint32_t presentModeCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, nullptr ) ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + nullptr ) ); if ( ( result == Result::eSuccess ) && presentModeCount ) { presentModes.resize( presentModeCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -75351,21 +88558,33 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); presentModes.resize( presentModeCount ); } - return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" ); + return createResultValue( + result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector presentModes( vectorAllocator ); - uint32_t presentModeCount; - Result result; + std::vector presentModes( vectorAllocator ); + uint32_t presentModeCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, nullptr ) ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + nullptr ) ); if ( ( result == Result::eSuccess ) && presentModeCount ) { presentModes.resize( presentModeCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); } } while ( result == Result::eIncomplete ); 
if ( result == Result::eSuccess ) @@ -75373,30 +88592,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); presentModes.resize( presentModeCount ); } - return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" ); + return createResultValue( + result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); + return static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, + static_cast( surface ), + pPresentModeCount, + reinterpret_cast( pPresentModes ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { - std::vector presentModes; - uint32_t presentModeCount; - Result result; + std::vector presentModes; + uint32_t presentModeCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); if ( ( result == Result::eSuccess ) && presentModeCount ) { presentModes.resize( presentModeCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, + static_cast( surface ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -75404,21 +88638,30 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); presentModes.resize( presentModeCount ); } - return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" ); + return createResultValue( + result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + 
PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Allocator const & vectorAllocator, + Dispatch const & d ) const { - std::vector presentModes( vectorAllocator ); - uint32_t presentModeCount; - Result result; + std::vector presentModes( vectorAllocator ); + uint32_t presentModeCount; + Result result; do { - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); if ( ( result == Result::eSuccess ) && presentModeCount ) { presentModes.resize( presentModeCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, + static_cast( surface ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -75426,44 +88669,64 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); presentModes.resize( presentModeCount ); } - return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" ); + return createResultValue( + result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( pSupported ) ) ); + return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, + queueFamilyIndex, + static_cast( surface ), + reinterpret_cast( pSupported ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceSupportKHR( + uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::Bool32 supported; - Result result = static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( &supported ) ) ); - return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceSupportKHR" ); + Result result = + static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, + queueFamilyIndex, + static_cast( surface ), + reinterpret_cast( &supported ) ) ); + return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - 
VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result + PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT * pToolProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast( pToolProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( + m_physicalDevice, pToolCount, reinterpret_cast( pToolProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getToolPropertiesEXT(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const { - std::vector toolProperties; - uint32_t toolCount; - Result result; + std::vector toolProperties; + uint32_t toolCount; + Result result; do { result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); if ( ( result == Result::eSuccess ) && toolCount ) { toolProperties.resize( toolCount ); - result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( + m_physicalDevice, + &toolCount, + reinterpret_cast( toolProperties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -75471,21 +88734,26 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); toolProperties.resize( toolCount ); } - return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" ); + return createResultValue( + result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getToolPropertiesEXT( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector toolProperties( vectorAllocator ); - uint32_t toolCount; - Result result; + std::vector toolProperties( vectorAllocator ); + uint32_t toolCount; + Result result; do { result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); if ( ( result == Result::eSuccess ) && toolCount ) { toolProperties.resize( toolCount ); - result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); + result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( + m_physicalDevice, + &toolCount, + reinterpret_cast( toolProperties.data() ) ) ); } } while ( result == Result::eIncomplete ); if ( result == Result::eSuccess ) @@ -75493,641 +88761,2886 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); toolProperties.resize( toolCount ); } - return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" ); + return createResultValue( + result, toolProperties, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getToolPropertiesEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WAYLAND_KHR - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( + uint32_t queueFamilyIndex, struct wl_display * display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) ); + return static_cast( + d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( + uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( + uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) ); + return static_cast( + d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) ); } -#else - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# else + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( + uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_XCB_KHR - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) ); + return static_cast( + d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 
- template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XCB_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ #ifdef VK_USE_PLATFORM_XLIB_KHR - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) ); + return static_cast( + d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display & dpy, + VisualID visualID, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - template - VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, + RROutput rrOutput, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); + return static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const { VULKAN_HPP_NAMESPACE::DisplayKHR display; - Result result = static_cast( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); - return createResultValue( result, display, 
VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getRandROutputDisplayEXT" ); + Result result = static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); + return createResultValue( + result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const { - Result result = static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::releaseDisplayEXT" ); + Result result = + static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); + d.vkGetQueueCheckpointDataNV( + m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV(Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV( Dispatch const & d ) const { - std::vector checkpointData; - uint32_t checkpointDataCount; + std::vector checkpointData; + uint32_t checkpointDataCount; d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + d.vkGetQueueCheckpointDataNV( + m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); return checkpointData; } - template - VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const + template + VULKAN_HPP_INLINE std::vector + Queue::getCheckpointDataNV( Allocator const & vectorAllocator, Dispatch const & d ) const { - std::vector checkpointData( vectorAllocator ); - uint32_t checkpointDataCount; + std::vector checkpointData( vectorAllocator ); + uint32_t checkpointDataCount; 
d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + d.vkGetQueueCheckpointDataNV( + m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); return checkpointData; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); + d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast( pBindInfo ), static_cast( fence ) ) ); + return static_cast( d.vkQueueBindSparse( m_queue, + bindInfoCount, + reinterpret_cast( pBindInfo ), + static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Queue::bindSparse( ArrayProxy bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Queue::bindSparse( ArrayProxy bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const { - Result result = static_cast( d.vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast( bindInfo.data() ), static_cast( fence ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::bindSparse" ); + Result result = + static_cast( d.vkQueueBindSparse( m_queue, + bindInfo.size(), + reinterpret_cast( bindInfo.data() ), + static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkQueueEndDebugUtilsLabelEXT( m_queue ); } #else - template - VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkQueueEndDebugUtilsLabelEXT( m_queue ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); + d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( pPresentInfo ) ) ); + return static_cast( + d.vkQueuePresentKHR( m_queue, reinterpret_cast( pPresentInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const + template + VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( &presentInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); + Result result = + static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( &presentInfo ) ) ); + return createResultValue( + result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); + return static_cast( d.vkQueueSetPerformanceConfigurationINTEL( + m_queue, static_cast( configuration ) ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + 
Dispatch const & d ) const { - Result result = static_cast( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::setPerformanceConfigurationINTEL" ); + Result result = static_cast( d.vkQueueSetPerformanceConfigurationINTEL( + m_queue, static_cast( configuration ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + return static_cast( d.vkQueueSubmit( + m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Queue::submit( ArrayProxy submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type + Queue::submit( ArrayProxy submits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const { - Result result = static_cast( d.vkQueueSubmit( m_queue, submits.size() , reinterpret_cast( submits.data() ), static_cast( fence ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::submit" ); + Result result = static_cast( d.vkQueueSubmit( m_queue, + submits.size(), + reinterpret_cast( submits.data() ), + static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkQueueWaitIdle( m_queue ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type Queue::waitIdle(Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Queue::waitIdle( Dispatch const & d ) const { Result result = static_cast( d.vkQueueWaitIdle( m_queue ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct 
isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #ifdef VK_ENABLE_BETA_EXTENSIONS - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct 
isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; 
+ }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid 
+ { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { 
value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct 
isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; 
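Every hunk in this run follows a single pattern: vulkan.hpp declares a primary isStructureChainValid trait that defaults to false and emits one explicit specialization with value = true for each (base structure, extension structure) pair that may legally be linked in a vk::StructureChain; the formatting change only expands the former one-line bodies into braced blocks. The standalone sketch below illustrates that pattern only. The struct names (DeviceCreateInfoStub, FeaturesStub, UnrelatedStub) and the appendToChain() helper are made up for this example and stand in for the generated Vulkan types and the real StructureChain machinery.

```cpp
// Sketch of the trait pattern shown above (stand-in types, not generated vulkan.hpp code).

struct DeviceCreateInfoStub
{};  // stand-in for a base Vulkan structure
struct FeaturesStub
{};  // stand-in for an extension structure that may extend it
struct UnrelatedStub
{};  // a structure that must not appear in that chain

// Primary template: by default, Extension is not a valid link behind Base.
template <typename Base, typename Extension>
struct isStructureChainValid
{
  enum
  {
    value = false
  };
};

// One explicit specialization per legal (base, extension) pair flips the flag to true.
template <>
struct isStructureChainValid<DeviceCreateInfoStub, FeaturesStub>
{
  enum
  {
    value = true
  };
};

// A chain type can then reject illegal links at compile time:
template <typename Base, typename Extension>
void appendToChain( Base &, Extension & )
{
  static_assert( isStructureChainValid<Base, Extension>::value,
                 "Extension may not be chained to this base structure" );
}

int main()
{
  DeviceCreateInfoStub base;
  FeaturesStub         features;
  appendToChain( base, features );  // compiles: the specialization above sets value to true

  // UnrelatedStub unrelated;
  // appendToChain( base, unrelated );  // would trigger the static_assert
}
```

Keeping the flag as an enum member rather than deriving from std::true_type is a minor style choice; the important point is that each specialization is pure compile-time data, so the long run of blocks above adds nothing to the compiled binary.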
+ template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = 
true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #ifdef VK_ENABLE_BETA_EXTENSIONS - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct 
isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - 
template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct 
isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + 
template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #ifdef VK_ENABLE_BETA_EXTENSIONS - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #ifdef VK_USE_PLATFORM_GGP - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_GGP*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> 
struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct 
isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; + template <> + struct isStructureChainValid + { + enum + { + value = true + }; + }; #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL class DynamicLoader { public: -#ifdef VULKAN_HPP_NO_EXCEPTIONS +# ifdef VULKAN_HPP_NO_EXCEPTIONS DynamicLoader() VULKAN_HPP_NOEXCEPT : m_success( false ) -#else +# else DynamicLoader() : m_success( false ) -#endif +# endif { -#if defined(__linux__) +# if defined( __linux__ ) m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); -#elif defined(__APPLE__) +# elif defined( __APPLE__ ) m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); -#elif defined(_WIN32) +# elif defined( _WIN32 ) m_library = LoadLibrary( TEXT( "vulkan-1.dll" ) ); -#else +# else VULKAN_HPP_ASSERT( false && "unsupported platform" ); -#endif +# endif m_success = m_library != 0; -#ifndef VULKAN_HPP_NO_EXCEPTIONS +# ifndef VULKAN_HPP_NO_EXCEPTIONS if ( !m_success ) { - // NOTE there should be an 
InitializationFailedError, but msvc insists on the symbol does not exist within the scope of this function.
+        // NOTE there should be an InitializationFailedError, but msvc insists that the symbol does not exist within the
+        // scope of this function.
        throw std::runtime_error( "Failed to load vulkan library!" );
      }
-#endif
+# endif
    }

-    DynamicLoader( DynamicLoader const& ) = delete;
+    DynamicLoader( DynamicLoader const & ) = delete;
    DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
-      : m_success(other.m_success)
-      , m_library(other.m_library)
+      : m_success( other.m_success )
+      , m_library( other.m_library )
    {
      other.m_library = nullptr;
    }

-    DynamicLoader &operator=( DynamicLoader const& ) = delete;
+    DynamicLoader & operator=( DynamicLoader const & ) = delete;

-    DynamicLoader &operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
+    DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
    {
      m_success = other.m_success;
-      std::swap(m_library, other.m_library);
+      std::swap( m_library, other.m_library );
      return *this;
    }
@@ -76135,62 +91648,65 @@ namespace VULKAN_HPP_NAMESPACE
    {
      if ( m_library )
      {
-#if defined(__linux__) || defined(__APPLE__)
+# if defined( __linux__ ) || defined( __APPLE__ )
        dlclose( m_library );
-#elif defined(_WIN32)
+# elif defined( _WIN32 )
        FreeLibrary( m_library );
-#endif
+# endif
      }
    }

    template <typename T>
-    T getProcAddress( const char* function ) const VULKAN_HPP_NOEXCEPT
+    T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT
    {
-#if defined(__linux__) || defined(__APPLE__)
+# if defined( __linux__ ) || defined( __APPLE__ )
      return (T)dlsym( m_library, function );
-#elif defined(_WIN32)
+# elif defined( _WIN32 )
      return (T)GetProcAddress( m_library, function );
-#endif
+# endif
    }

-    bool success() const VULKAN_HPP_NOEXCEPT { return m_success; }
+    bool success() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_success;
+    }

  private:
    bool m_success;
-#if defined(__linux__) || defined(__APPLE__)
-    void *m_library;
-#elif defined(_WIN32)
+# if defined( __linux__ ) || defined( __APPLE__ )
+    void * m_library;
+# elif defined( _WIN32 )
    HMODULE m_library;
-#else
-#error unsupported platform
-#endif
+# else
+# error unsupported platform
+# endif
  };
#endif

  class DispatchLoaderDynamic
  {
  public:
-    PFN_vkCreateInstance vkCreateInstance = 0;
+    PFN_vkCreateInstance                       vkCreateInstance                       = 0;
    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
-    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
-    PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
-    PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
-    PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
-    PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
-    PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
-    PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
-    PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
-    PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
-    PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
-    PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
-    PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
-    PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
-    PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
-    PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0;
-    PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
-    PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
-    PFN_vkCmdBindVertexBuffers
vkCmdBindVertexBuffers = 0; - PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkCmdBuildAccelerationStructureIndirectKHR vkCmdBuildAccelerationStructureIndirectKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -76198,9 +91714,9 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdBuildAccelerationStructureKHR vkCmdBuildAccelerationStructureKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; - PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; - PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; - PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -76208,114 +91724,114 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; - PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; - PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; - PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; - PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; - PFN_vkCmdDispatch vkCmdDispatch = 0; - PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; - PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; - PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; - PFN_vkCmdDraw vkCmdDraw = 0; - PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; - PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; - PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; - PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; - 
PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; - PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; - PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; - PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; - PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; - PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; - PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; - PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; - PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; - PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; - PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; - PFN_vkCmdEndQuery vkCmdEndQuery = 0; - PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; - PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; - PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; - PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; - PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; - PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; - PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; - PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; - PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; - PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; - PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; - PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; - PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; - PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; - PFN_vkCmdPushConstants vkCmdPushConstants = 0; - PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + PFN_vkCmdExecuteGeneratedCommandsNV 
vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - PFN_vkCmdResetEvent vkCmdResetEvent = 0; - PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; - PFN_vkCmdResolveImage vkCmdResolveImage = 0; - PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; - PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; - PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; - PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; - PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; - PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; - PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; - PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; - PFN_vkCmdSetEvent vkCmdSetEvent = 0; - PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; - PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; - PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; - PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; - PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; - PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; - PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; - PFN_vkCmdSetScissor vkCmdSetScissor = 0; - PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; - PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; - PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; - PFN_vkCmdSetViewport vkCmdSetViewport = 0; - PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; - PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + 
PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; - PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; - PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; - PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; - PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; - PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; - PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; #ifdef VK_USE_PLATFORM_WIN32_KHR PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; - PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; - PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; - PFN_vkAllocateMemory vkAllocateMemory = 0; - PFN_vkBindAccelerationStructureMemoryKHR vkBindAccelerationStructureMemoryKHR = 0; - PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; - PFN_vkBindBufferMemory vkBindBufferMemory = 0; - PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; - PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; - PFN_vkBindImageMemory vkBindImageMemory = 0; - PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; - PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkBindAccelerationStructureMemoryKHR vkBindAccelerationStructureMemoryKHR = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkBuildAccelerationStructureKHR vkBuildAccelerationStructureKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -76333,84 +91849,84 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; #endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; - PFN_vkCreateBuffer vkCreateBuffer = 0; - PFN_vkCreateBufferView vkCreateBufferView = 0; - PFN_vkCreateCommandPool vkCreateCommandPool = 0; - PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; - PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; - PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; - PFN_vkCreateEvent vkCreateEvent = 0; - PFN_vkCreateFence vkCreateFence = 0; - PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; - PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; - PFN_vkCreateImage vkCreateImage = 0; - PFN_vkCreateImageView vkCreateImageView = 0; - PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; - PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; - PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; - PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; - PFN_vkCreateRenderPass vkCreateRenderPass = 0; - PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; - PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; - PFN_vkCreateSampler vkCreateSampler = 0; - PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; - PFN_vkCreateSemaphore vkCreateSemaphore = 0; - PFN_vkCreateShaderModule vkCreateShaderModule = 0; - PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; - PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; - PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; - PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; - PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + 
PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; - PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; - PFN_vkDestroyBuffer vkDestroyBuffer = 0; - PFN_vkDestroyBufferView vkDestroyBufferView = 0; - PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; - PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; - PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; - PFN_vkDestroyDevice vkDestroyDevice = 0; - PFN_vkDestroyEvent vkDestroyEvent = 0; - PFN_vkDestroyFence vkDestroyFence = 0; - PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; - PFN_vkDestroyImage vkDestroyImage = 0; - PFN_vkDestroyImageView vkDestroyImageView = 0; - PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; - PFN_vkDestroyPipeline vkDestroyPipeline = 0; - PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; - PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; - PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; - PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; - PFN_vkDestroySampler vkDestroySampler = 0; - PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; - PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; - PFN_vkDestroySemaphore vkDestroySemaphore = 0; - PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; - PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; - PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; - PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; - PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; - PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; - PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; - PFN_vkFreeMemory vkFreeMemory = 0; + PFN_vkDestroyDevice vkDestroyDevice = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkDestroyQueryPool 
vkDestroyQueryPool = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkFreeMemory vkFreeMemory = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -76422,61 +91938,61 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_ANDROID_KHR PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; - PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; - PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; - PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; - PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; - PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; - PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; - PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; #ifdef 
VK_USE_PLATFORM_WIN32_KHR PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; - PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; - PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; - PFN_vkGetEventStatus vkGetEventStatus = 0; - PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - PFN_vkGetFenceStatus vkGetFenceStatus = 0; + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; #ifdef VK_USE_PLATFORM_WIN32_KHR PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; - PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; - PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; - PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; - PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; - PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; - PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; - PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; - PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; #ifdef VK_USE_PLATFORM_ANDROID_KHR PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; - PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; #ifdef VK_USE_PLATFORM_WIN32_KHR PFN_vkGetMemoryWin32HandleKHR 
vkGetMemoryWin32HandleKHR = 0; @@ -76487,32 +92003,32 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; - PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; - PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; - PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; - PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; - PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; - PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; - PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; - PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; #ifdef VK_USE_PLATFORM_WIN32_KHR PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; - PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; - PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; - PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; #ifdef VK_USE_PLATFORM_WIN32_KHR PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -76521,40 +92037,40 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; - PFN_vkMapMemory vkMapMemory = 0; - 
PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; - PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; - PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; - PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkMapMemory vkMapMemory = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; #ifdef VK_USE_PLATFORM_WIN32_KHR PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; - PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; - PFN_vkResetCommandPool vkResetCommandPool = 0; - PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; - PFN_vkResetEvent vkResetEvent = 0; - PFN_vkResetFences vkResetFences = 0; - PFN_vkResetQueryPool vkResetQueryPool = 0; - PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; - PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; - PFN_vkSetEvent vkSetEvent = 0; - PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; - PFN_vkSignalSemaphore vkSignalSemaphore = 0; - PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; - PFN_vkTrimCommandPool vkTrimCommandPool = 0; - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; - PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; - PFN_vkUnmapMemory vkUnmapMemory = 0; - PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; - PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; - PFN_vkWaitForFences vkWaitForFences = 0; - PFN_vkWaitSemaphores vkWaitSemaphores = 0; - PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkUnmapMemory vkUnmapMemory = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; #ifdef VK_ENABLE_BETA_EXTENSIONS PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; #endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ @@ -76564,7 +92080,7 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; - PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; #ifdef VK_USE_PLATFORM_IOS_MVK PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; #endif /*VK_USE_PLATFORM_IOS_MVK*/ @@ -76595,78 +92111,81 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_XLIB_KHR PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; #endif /*VK_USE_PLATFORM_XLIB_KHR*/ - PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; - PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; - PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; - PFN_vkDestroyInstance vkDestroyInstance = 0; - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; - PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; - PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - PFN_vkCreateDevice vkCreateDevice = 0; - PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkCreateDevice vkCreateDevice = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; - PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; - PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; - PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; - PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; - PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; - PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; - PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; - PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR 
vkGetPhysicalDeviceDisplayPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; - PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; - PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; - PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; - PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; - PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; - PFN_vkGetPhysicalDevicePresentRectanglesKHR 
vkGetPhysicalDevicePresentRectanglesKHR = 0; - PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; - PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; - PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; + 
PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; #ifdef VK_USE_PLATFORM_WIN32_KHR PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; - PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; #ifdef VK_USE_PLATFORM_WAYLAND_KHR PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ @@ -76682,1305 +92201,1822 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; - PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; - PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; - PFN_vkQueueBindSparse vkQueueBindSparse = 0; - PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; - PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; - PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; - PFN_vkQueueSubmit vkQueueSubmit = 0; - PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkQueueSubmit vkQueueSubmit = 0; + PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; public: DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default; -#if !defined(VK_NO_PROTOTYPES) - // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library. +#if !defined( VK_NO_PROTOTYPES ) + // This interface is designed to be used for per-device function pointers in combination with a linked vulkan + // library. 
    template <typename DynamicLoader>
-    void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device, DynamicLoader const& dl) VULKAN_HPP_NOEXCEPT
+    void init( VULKAN_HPP_NAMESPACE::Instance const & instance,
+               VULKAN_HPP_NAMESPACE::Device const &   device,
+               DynamicLoader const &                  dl ) VULKAN_HPP_NOEXCEPT
     {
-      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr");
-      PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>("vkGetDeviceProcAddr");
-      init(static_cast<VkInstance>(instance), getInstanceProcAddr, static_cast<VkDevice>(device), device ? getDeviceProcAddr : nullptr);
+      PFN_vkGetInstanceProcAddr getInstanceProcAddr =
+        dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
+      PFN_vkGetDeviceProcAddr getDeviceProcAddr =
+        dl.template getProcAddress<PFN_vkGetDeviceProcAddr>( "vkGetDeviceProcAddr" );
+      init( static_cast<VkInstance>( instance ),
+            getInstanceProcAddr,
+            static_cast<VkDevice>( device ),
+            device ? getDeviceProcAddr : nullptr );
     }

-    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
+    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan
+    // library.
     template <typename DynamicLoader
-#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
-      = vk::DynamicLoader
-#endif
-    >
-    void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device) VULKAN_HPP_NOEXCEPT
+#  if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+              = vk::DynamicLoader
+#  endif
+              >
+    void init( VULKAN_HPP_NAMESPACE::Instance const & instance,
+               VULKAN_HPP_NAMESPACE::Device const &   device ) VULKAN_HPP_NOEXCEPT
     {
       static DynamicLoader dl;
-      init(instance, device, dl);
+      init( instance, device, dl );
     }
-#endif // !defined(VK_NO_PROTOTYPES)
+#endif  // !defined(VK_NO_PROTOTYPES)

-    DispatchLoaderDynamic(PFN_vkGetInstanceProcAddr getInstanceProcAddr) VULKAN_HPP_NOEXCEPT
+    DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
     {
-      init(getInstanceProcAddr);
+      init( getInstanceProcAddr );
     }

     void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
     {
-      VULKAN_HPP_ASSERT(getInstanceProcAddr);
+      VULKAN_HPP_ASSERT( getInstanceProcAddr );
       vkGetInstanceProcAddr = getInstanceProcAddr;
-      vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
-      vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
-      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
-      vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
+      vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
+      vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties(
+        vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
+      vkEnumerateInstanceLayerProperties =
+        PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
+      vkEnumerateInstanceVersion =
+        PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
     }

     // This interface does not require a linked vulkan library.
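The comments in this hunk distinguish two initialization paths for the dispatcher: one that pulls the loader entry points through a DynamicLoader, and one that takes a VkInstance and a vkGetInstanceProcAddr pointer directly and therefore needs no linked Vulkan library. As a minimal usage sketch of the first path (assuming VULKAN_HPP_DISPATCH_LOADER_DYNAMIC is set to 1 and that the VULKAN_HPP_DEFAULT_DISPATCHER / VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE helpers from vulkan.hpp are used), an application could do:

    // Sketch only: initialize the default dynamic dispatcher without linking against the Vulkan loader.
    // Assumes VULKAN_HPP_DISPATCH_LOADER_DYNAMIC is defined to 1 before including vulkan.hpp.
    #include <vulkan/vulkan.hpp>

    VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE

    int main()
    {
      vk::DynamicLoader dl;  // loads the Vulkan library at runtime
      auto pfnGetInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
      VULKAN_HPP_DEFAULT_DISPATCHER.init( pfnGetInstanceProcAddr );  // global function pointers

      vk::Instance instance = vk::createInstance( vk::InstanceCreateInfo() );
      VULKAN_HPP_DEFAULT_DISPATCHER.init( instance );  // instance-level function pointers

      // after creating a vk::Device, calling init( device ) loads the device-level function pointers
      return 0;
    }

The overloads immediately below serve the second path, where the application already owns the instance handle and the vkGetInstanceProcAddr pointer itself.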
-    DispatchLoaderDynamic( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
+    DispatchLoaderDynamic( VkInstance                instance,
+                           PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+                           VkDevice                  device            = VK_NULL_HANDLE,
+                           PFN_vkGetDeviceProcAddr   getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
     {
       init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
     }

     // This interface does not require a linked vulkan library.
-    void init( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
+    void init( VkInstance                instance,
+               PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+               VkDevice                  device = VK_NULL_HANDLE,
+               PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
     {
-      VULKAN_HPP_ASSERT(instance && getInstanceProcAddr);
+      VULKAN_HPP_ASSERT( instance && getInstanceProcAddr );
       vkGetInstanceProcAddr = getInstanceProcAddr;
-      init( VULKAN_HPP_NAMESPACE::Instance(instance) );
-      if (device) {
-        init( VULKAN_HPP_NAMESPACE::Device(device) );
+      init( VULKAN_HPP_NAMESPACE::Instance( instance ) );
+      if ( device )
+      {
+        init( VULKAN_HPP_NAMESPACE::Device( device ) );
       }
     }

     void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
     {
-      VkInstance instance = static_cast<VkInstance>(instanceCpp);
+      VkInstance instance = static_cast<VkInstance>( instanceCpp );
#ifdef VK_USE_PLATFORM_ANDROID_KHR
-      vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
+      vkCreateAndroidSurfaceKHR =
+        PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-      vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
-      vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
-      vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
-      vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
+      vkCreateDebugReportCallbackEXT =
+        PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
+      vkCreateDebugUtilsMessengerEXT =
+        PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
+      vkCreateDisplayPlaneSurfaceKHR =
+        PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
+      vkCreateHeadlessSurfaceEXT =
+        PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
#ifdef VK_USE_PLATFORM_IOS_MVK
       vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) );
#endif /*VK_USE_PLATFORM_IOS_MVK*/
#ifdef VK_USE_PLATFORM_FUCHSIA
-      vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
+      vkCreateImagePipeSurfaceFUCHSIA =
+        PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#ifdef VK_USE_PLATFORM_MACOS_MVK
-
vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); + vkCreateMacOSSurfaceMVK = + PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); #endif /*VK_USE_PLATFORM_MACOS_MVK*/ #ifdef VK_USE_PLATFORM_METAL_EXT - vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); + vkCreateMetalSurfaceEXT = + PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); #endif /*VK_USE_PLATFORM_METAL_EXT*/ #ifdef VK_USE_PLATFORM_GGP - vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); + vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( + vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); #endif /*VK_USE_PLATFORM_GGP*/ #ifdef VK_USE_PLATFORM_VI_NN vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); #endif /*VK_USE_PLATFORM_VI_NN*/ #ifdef VK_USE_PLATFORM_WAYLAND_KHR - vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + vkCreateWaylandSurfaceKHR = + PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); + vkCreateWin32SurfaceKHR = + PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_XCB_KHR vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); #endif /*VK_USE_PLATFORM_XCB_KHR*/ #ifdef VK_USE_PLATFORM_XLIB_KHR - vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkCreateXlibSurfaceKHR = + PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); #endif /*VK_USE_PLATFORM_XLIB_KHR*/ - vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); - vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); - vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); - vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); + vkDebugReportMessageEXT = + PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + vkDestroyDebugReportCallbackEXT = + PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = + PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); - vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); - 
vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); - vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); - vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + vkEnumeratePhysicalDeviceGroups = + PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + vkEnumeratePhysicalDevices = + PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); + vkSubmitDebugUtilsMessageEXT = + PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkAcquireXlibDisplayEXT = + PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); - vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); - vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); - vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); - vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); - vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); - vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); - vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); - vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); - vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); - vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); - vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); - vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); - vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); - vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); - vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); - vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); - vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); - vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); - vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); - vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); - vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); - vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); - vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); - vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); - vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); - vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); - vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); - vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); - vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); - vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); - vkGetPhysicalDeviceMemoryProperties2KHR = 
PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); - vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); - vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); - vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); - vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); - vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); - vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); - vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); - vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); - vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); - vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); - vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); - vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); - vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkCreateDisplayModeKHR = + PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( + 
vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = + PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetDisplayModeProperties2KHR = + PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayModePropertiesKHR = + PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = + PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = + PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( + vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 
PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + vkGetPhysicalDeviceFeatures = + PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); + vkGetPhysicalDeviceFeatures2 = + PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceFeatures2KHR = + PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); + vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); + vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + vkGetPhysicalDeviceProperties = + PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); + vkGetPhysicalDeviceProperties2 = + PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceProperties2KHR = + PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( + 
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); + vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); - vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); - vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); #ifdef VK_USE_PLATFORM_WAYLAND_KHR - vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); + vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( + 
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_XCB_KHR - vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_XCB_KHR*/ #ifdef VK_USE_PLATFORM_XLIB_KHR - vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_XLIB_KHR*/ #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = + PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdBeginConditionalRenderingEXT = + PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = + PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); + vkCmdBeginQueryIndexedEXT = + PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); - vkCmdBeginTransformFeedbackEXT 
= PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); + vkCmdBeginRenderPass2KHR = + PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); + vkCmdBeginTransformFeedbackEXT = + PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdBindDescriptorSets = + PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); - vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); + vkCmdBindPipelineShaderGroupNV = + PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCmdBindShadingRateImageNV = + PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( + vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBindVertexBuffers = + PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureIndirectKHR" ) ); + vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( + vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureIndirectKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdBuildAccelerationStructureKHR = PFN_vkCmdBuildAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructureKHR = PFN_vkCmdBuildAccelerationStructureKHR( + vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdBuildAccelerationStructureNV = + PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( 
vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = + PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureKHR = + PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = + PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( + vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyBufferToImage = + PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); + vkCmdCopyImageToBuffer = + PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( + vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); + vkCmdCopyQueryPoolResults = + PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); + vkCmdDebugMarkerBeginEXT = + PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); + 
vkCmdDebugMarkerEndEXT = + PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = + PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndexedIndirect = + PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDrawIndexedIndirectCount = + PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); + vkCmdDrawIndexedIndirectCountAMD = + PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); + vkCmdDrawIndexedIndirectCountKHR = + PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawIndirectByteCountEXT = + PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); + vkCmdDrawIndirectCount = + PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndirectCountAMD = + PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); + vkCmdDrawIndirectCountKHR = + PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, 
"vkCmdDrawIndirectCountKHR" ) ); + vkCmdDrawMeshTasksIndirectCountNV = + PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + vkCmdDrawMeshTasksIndirectNV = + PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdEndConditionalRenderingEXT = + PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = + PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); + vkCmdEndQueryIndexedEXT = + PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdEndRenderPass2KHR = + PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); + vkCmdEndTransformFeedbackEXT = + PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = + PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); + vkCmdInsertDebugUtilsLabelEXT = + PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); 
- vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( + vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); + vkCmdPushDescriptorSetKHR = + PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( + vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); + vkCmdSetBlendConstants = + PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); + vkCmdSetCoarseSampleOrderNV = + PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEXT = + PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); + vkCmdSetExclusiveScissorNV = + PFN_vkCmdSetExclusiveScissorNV( 
vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); + vkCmdSetLineStippleEXT = + PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); + vkCmdSetPerformanceMarkerINTEL = + PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = + PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( + vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetSampleLocationsEXT = + PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilCompareMask = + PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilReference = + PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); + vkCmdSetStencilWriteMask = + PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); - vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( + vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetViewportWScalingNV = + PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); + vkCmdTraceRaysIndirectKHR = + PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( 
vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( + vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCmdWriteBufferMarkerAMD = + PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( + vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); + vkAcquireNextImage2KHR = + PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) ); - vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); + vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( + vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkAcquireProfilingLockKHR = + PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, 
"vkAcquireProfilingLockKHR" ) ); + vkAllocateCommandBuffers = + PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); + vkAllocateDescriptorSets = + PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); - vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryKHR" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); + vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( + vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryKHR" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( + vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); + vkBindBufferMemory2KHR = + PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkBuildAccelerationStructureKHR = PFN_vkBuildAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructureKHR" ) ); + vkBuildAccelerationStructureKHR = + PFN_vkBuildAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureKHR = + PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( + vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); + 
vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( + vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); + vkCreateAccelerationStructureKHR = + PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); + vkCreateAccelerationStructureNV = + PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); + vkCreateComputePipelines = + PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); + vkCreateDeferredOperationKHR = + PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); - vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); - vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); - vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); + vkCreateDescriptorPool = + PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); + vkCreateDescriptorSetLayout = + PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); + vkCreateDescriptorUpdateTemplate = + PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( + vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); 
- vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); - vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); + vkCreateGraphicsPipelines = + PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); - vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); + vkCreateIndirectCommandsLayoutNV = + PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); - vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); + vkCreatePipelineLayout = + PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); + vkCreateRayTracingPipelinesKHR = + PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); + vkCreateRayTracingPipelinesNV = + PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); + vkCreateRenderPass2KHR = + PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); + vkCreateSamplerYcbcrConversion = + PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); + vkCreateSamplerYcbcrConversionKHR = + PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( 
vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); + vkCreateSharedSwapchainsKHR = + PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) ); - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); + vkCreateValidationCacheEXT = + PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); + vkDebugMarkerSetObjectNameEXT = + PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); + vkDebugMarkerSetObjectTagEXT = + PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); + vkDeferredOperationJoinKHR = + PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); - vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); + vkDestroyAccelerationStructureKHR = + PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureNV = + PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = + PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - vkDestroyDevice = PFN_vkDestroyDevice( 
vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); - vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); + vkDestroyDescriptorPool = + PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); + vkDestroyDescriptorSetLayout = + PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); + vkDestroyDescriptorUpdateTemplate = + PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( + vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); - vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); - vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); + vkDestroyIndirectCommandsLayoutNV = + PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); - vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); + vkDestroyPipelineCache = + PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); + vkDestroyPipelineLayout = + PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); - vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); + vkDestroySamplerYcbcrConversion = + PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, 
"vkDestroySamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( + vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = + PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) ); - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); - vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); + vkDisplayPowerControlEXT = + PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); + vkFlushMappedMemoryRanges = + PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); - vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsKHR" ) ); + vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); #ifdef VK_USE_PLATFORM_ANDROID_KHR - vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, 
"vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( + vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); - vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); - vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); + vkGetBufferDeviceAddress = + PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); + vkGetBufferDeviceAddressEXT = + PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); + vkGetBufferDeviceAddressKHR = + PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); + vkGetBufferMemoryRequirements = + PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); + vkGetBufferMemoryRequirements2 = + PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2KHR = + PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); + vkGetBufferOpaqueCaptureAddress = + PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( + vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + vkGetCalibratedTimestampsEXT = + PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( + vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); + vkGetDeferredOperationResultKHR = + PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); #endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); + vkGetDescriptorSetLayoutSupport = + PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); - vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); - vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkGetDeviceMemoryCommitment = + PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( + vkGetInstanceProcAddr( instance, 
"vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); - vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = + PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); - vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); - vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); + vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( + vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( + 
vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + vkGetImageMemoryRequirements = + PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); + vkGetImageMemoryRequirements2 = + PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); + vkGetImageMemoryRequirements2KHR = + PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( + vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( + vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( + vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); + vkGetImageSubresourceLayout = + PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); + vkGetImageViewAddressNVX = + PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + vkGetImageViewHandleNVX = + PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); #ifdef VK_USE_PLATFORM_ANDROID_KHR - vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( + vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); - vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); + vkGetMemoryFdPropertiesKHR = + PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandleKHR = + PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); + vkGetMemoryWin32HandleNV = + PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkGetPastPresentationTimingGOOGLE = 
PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); - vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); - vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPastPresentationTimingGOOGLE = + PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); + vkGetPerformanceParameterINTEL = + PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); + vkGetPipelineCacheData = + PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( + vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( + vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); - vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( + vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( + vkGetInstanceProcAddr( 
instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); + vkGetRefreshCycleDurationGOOGLE = + PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetRenderAreaGranularity = + PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); + vkGetSemaphoreCounterValue = + PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); + vkGetSemaphoreCounterValueKHR = + PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = + PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); - vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) ); - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); + vkGetSwapchainCounterEXT = + PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); + vkGetSwapchainImagesKHR = + PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) ); + vkGetSwapchainStatusKHR = + PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); + vkGetValidationCacheDataEXT = + PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); + vkImportFenceWin32HandleKHR = + PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); + vkImportSemaphoreFdKHR = + PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); + vkImportSemaphoreWin32HandleKHR = + PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); - vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) ); - vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( 
instance, "vkMapMemory" ) ); + vkInitializePerformanceApiINTEL = + PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); + vkInvalidateMappedMemoryRanges = + PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) ); vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); + vkMergeValidationCachesEXT = + PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); + vkRegisterDeviceEventEXT = + PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = + PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( + vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); + vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( + vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); + vkReleaseProfilingLockKHR = + PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); - vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); + vkResetFences = 
PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); + vkSetDebugUtilsObjectNameEXT = + PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = + PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = + PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); - vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); - vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); + vkUpdateDescriptorSetWithTemplate = + PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( + vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + vkUpdateDescriptorSets = + PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( + 
vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkGetQueueCheckpointDataNV = + PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); + vkQueueBeginDebugUtilsLabelEXT = + PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = + PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = + PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); - vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); - vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); + vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( + vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); } void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT { - VkDevice device = static_cast<VkDevice>(deviceCpp); + VkDevice device = static_cast<VkDevice>( deviceCpp ); vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdBeginConditionalRenderingEXT = + PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = + PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdBeginQueryIndexedEXT = + PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); -
vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdBeginRenderPass2KHR = + PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + vkCmdBeginTransformFeedbackEXT = + PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); - vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdBindPipelineShaderGroupNV = + PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCmdBindShadingRateImageNV = + PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( + vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureIndirectKHR" ) ); + vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( + vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureIndirectKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdBuildAccelerationStructureKHR = PFN_vkCmdBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructureKHR = + PFN_vkCmdBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdBuildAccelerationStructureNV = + PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = 
PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = + PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureKHR = + PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = + PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( + vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( + vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdCopyQueryPoolResults = + PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdDebugMarkerBeginEXT = + PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + vkCmdDebugMarkerInsertEXT = + PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); 
+ vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndexedIndirect = + PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDrawIndexedIndirectCount = + PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCmdDrawIndexedIndirectCountAMD = + PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + vkCmdDrawIndexedIndirectCountKHR = + PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + vkCmdDrawIndirectByteCountEXT = + PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawIndirectCountAMD = + PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + vkCmdDrawIndirectCountKHR = + PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + vkCmdDrawMeshTasksIndirectCountNV = + PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + vkCmdDrawMeshTasksIndirectNV = + PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); - vkCmdEndConditionalRenderingEXT = 
PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdEndConditionalRenderingEXT = + PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = + PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + vkCmdEndTransformFeedbackEXT = + PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = + PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdInsertDebugUtilsLabelEXT = + PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = + PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, 
"vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdPushDescriptorSetKHR = + PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( + vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkCmdSetCoarseSampleOrderNV = + PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEXT = + PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + vkCmdSetExclusiveScissorNV = + PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = 
PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetPerformanceMarkerINTEL = + PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = + PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( + vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetSampleLocationsEXT = + PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); - vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + vkCmdSetStencilCompareMask = + PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilReference = + PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdSetStencilWriteMask = + PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( + vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetViewportWScalingNV = + PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkCmdTraceRaysIndirectKHR = + PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - 
vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( + vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( + vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCmdWriteBufferMarkerAMD = + PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkAcquireFullScreenExclusiveModeEXT = + PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); - vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); - vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); - vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); - vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryKHR" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( + vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkAcquireProfilingLockKHR = + PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + 
vkAllocateCommandBuffers = + PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkAllocateDescriptorSets = + PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( + vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryKHR" ) ); + vkBindAccelerationStructureMemoryNV = + PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); - vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkBuildAccelerationStructureKHR = PFN_vkBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructureKHR" ) ); + vkBuildAccelerationStructureKHR = + PFN_vkBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureKHR = + PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( + vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( + vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkCreateAccelerationStructureKHR = + PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - 
vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkCreateAccelerationStructureNV = + PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkCreateComputePipelines = + PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkCreateDeferredOperationKHR = + PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); - vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); - vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); - vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkCreateDescriptorSetLayout = + PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkCreateDescriptorUpdateTemplate = + PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkCreateDescriptorUpdateTemplateKHR = + PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); - vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); - vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkCreateGraphicsPipelines = + PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); - vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); - vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + 
vkCreateIndirectCommandsLayoutNV = + PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); - vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkCreateRayTracingPipelinesKHR = + PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCreateRayTracingPipelinesNV = + PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); - vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkCreateSamplerYcbcrConversion = + PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkCreateSamplerYcbcrConversionKHR = + PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + vkCreateSharedSwapchainsKHR = + PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + 
vkCreateValidationCacheEXT = + PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDebugMarkerSetObjectNameEXT = + PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkDebugMarkerSetObjectTagEXT = + PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + vkDeferredOperationJoinKHR = + PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); - vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkDestroyAccelerationStructureKHR = + PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureNV = + PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = + PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); - vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkDestroyDescriptorSetLayout = + PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkDestroyDescriptorUpdateTemplate = + PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( + vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( 
vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); - vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); - vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); - vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkDestroyIndirectCommandsLayoutNV = + PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); - vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); - vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkDestroySamplerYcbcrConversion = + PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversionKHR = + PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = + PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) 
); - vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkDisplayPowerControlEXT = + PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkFlushMappedMemoryRanges = + PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); - vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkGetAccelerationStructureHandleNV = + PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsKHR" ) ); + vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); #ifdef VK_USE_PLATFORM_ANDROID_KHR - vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( + vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); - vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); - 
vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); - vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + vkGetBufferDeviceAddress = + PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferDeviceAddressEXT = + PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + vkGetBufferDeviceAddressKHR = + PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + vkGetBufferMemoryRequirements = + PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); + vkGetBufferMemoryRequirements2 = + PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2KHR = + PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + vkGetBufferOpaqueCaptureAddress = + PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetBufferOpaqueCaptureAddressKHR = + PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + vkGetCalibratedTimestampsEXT = + PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( + vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkGetDeferredOperationResultKHR = + PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + vkGetDescriptorSetLayoutSupport = + PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + vkGetDescriptorSetLayoutSupportKHR = + PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( 
vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupPeerMemoryFeatures = + PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); - vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); - vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkGetDeviceMemoryCommitment = + PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( + vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); - vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, 
"vkGetFenceStatus" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = + PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); - vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); - vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( + vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( + vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + vkGetImageMemoryRequirements = + PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageMemoryRequirements2 = + PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetImageMemoryRequirements2KHR = + PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + vkGetImageSparseMemoryRequirements = + PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements2 = + PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( + vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + vkGetImageSubresourceLayout = + PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkGetImageViewAddressNVX = + PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); #ifdef VK_USE_PLATFORM_ANDROID_KHR - vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( 
device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( + vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); - vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + vkGetMemoryFdPropertiesKHR = + PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + vkGetMemoryHostPointerPropertiesEXT = + PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandleKHR = + PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); + vkGetMemoryWin32HandleNV = + PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = + PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); - vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + vkGetPastPresentationTimingGOOGLE = + PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + vkGetPerformanceParameterINTEL = + PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); - vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( + vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( + vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = 
PFN_vkGetPipelineExecutableStatisticsKHR( + vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); #ifdef VK_ENABLE_BETA_EXTENSIONS - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); - vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( + vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingShaderGroupHandlesNV = + PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + vkGetRefreshCycleDurationGOOGLE = + PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetRenderAreaGranularity = + PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkGetSemaphoreCounterValue = + PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkGetSemaphoreCounterValueKHR = + PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = + PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + vkGetSwapchainCounterEXT = + PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + vkGetValidationCacheDataEXT = + 
PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkImportFenceWin32HandleKHR = + PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkImportSemaphoreWin32HandleKHR = + PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); - vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); - vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkInitializePerformanceApiINTEL = + PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkInvalidateMappedMemoryRanges = + PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkMergeValidationCachesEXT = + PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkRegisterDeviceEventEXT = + PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = + PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = + PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( + vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkReleaseProfilingLockKHR = + 
PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); - vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + vkSetDebugUtilsObjectNameEXT = + PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = + PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = + PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); - vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + vkUpdateDescriptorSetWithTemplate = + PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkUpdateDescriptorSetWithTemplateKHR = 
PFN_vkUpdateDescriptorSetWithTemplateKHR(
+ vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
 vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
- vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
- vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
- vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
+ vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
+ vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
+ vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
 #ifdef VK_ENABLE_BETA_EXTENSIONS
- vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+ vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR(
+ vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
- vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
- vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
+ vkGetQueueCheckpointDataNV =
+ PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
+ vkQueueBeginDebugUtilsLabelEXT =
+ PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
 vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
- vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
- vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
+ vkQueueEndDebugUtilsLabelEXT =
+ PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
+ vkQueueInsertDebugUtilsLabelEXT =
+ PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
 vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
- vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
- vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+ vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL(
+ vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
+ vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
 vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
 }
 };
-} // namespace VULKAN_HPP_NAMESPACE
+}  // namespace VULKAN_HPP_NAMESPACE
 namespace std
 {
- template <> struct hash<vk::AccelerationStructureKHR>
+ template <>
+ struct hash<vk::AccelerationStructureKHR>
 {
- std::size_t operator()(vk::AccelerationStructureKHR const& accelerationStructureKHR) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::AccelerationStructureKHR const & accelerationStructureKHR ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkAccelerationStructureKHR>{}(static_cast<VkAccelerationStructureKHR>(accelerationStructureKHR));
+ return std::hash<VkAccelerationStructureKHR>{}(
+ static_cast<VkAccelerationStructureKHR>( accelerationStructureKHR ) );
 }
 };
- template <> struct hash<vk::Buffer>
+ template <>
+ struct hash<vk::Buffer>
 {
- std::size_t operator()(vk::Buffer const& buffer) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::Buffer const & buffer ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkBuffer>{}(static_cast<VkBuffer>(buffer));
+ return std::hash<VkBuffer>{}( static_cast<VkBuffer>( buffer ) );
 }
 };
- template <> struct hash<vk::BufferView>
+ template <>
+ struct hash<vk::BufferView>
 {
- std::size_t operator()(vk::BufferView const& bufferView) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::BufferView const & bufferView ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkBufferView>{}(static_cast<VkBufferView>(bufferView));
+ return std::hash<VkBufferView>{}( static_cast<VkBufferView>( bufferView ) );
 }
 };
- template <> struct hash<vk::CommandBuffer>
+ template <>
+ struct hash<vk::CommandBuffer>
 {
- std::size_t operator()(vk::CommandBuffer const& commandBuffer) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::CommandBuffer const & commandBuffer ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkCommandBuffer>{}(static_cast<VkCommandBuffer>(commandBuffer));
+ return std::hash<VkCommandBuffer>{}( static_cast<VkCommandBuffer>( commandBuffer ) );
 }
 };
- template <> struct hash<vk::CommandPool>
+ template <>
+ struct hash<vk::CommandPool>
 {
- std::size_t operator()(vk::CommandPool const& commandPool) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::CommandPool const & commandPool ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkCommandPool>{}(static_cast<VkCommandPool>(commandPool));
+ return std::hash<VkCommandPool>{}( static_cast<VkCommandPool>( commandPool ) );
 }
 };
- template <> struct hash<vk::DebugReportCallbackEXT>
+ template <>
+ struct hash<vk::DebugReportCallbackEXT>
 {
- std::size_t operator()(vk::DebugReportCallbackEXT const& debugReportCallbackEXT) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::DebugReportCallbackEXT const & debugReportCallbackEXT ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkDebugReportCallbackEXT>{}(static_cast<VkDebugReportCallbackEXT>(debugReportCallbackEXT));
+ return std::hash<VkDebugReportCallbackEXT>{}( static_cast<VkDebugReportCallbackEXT>( debugReportCallbackEXT ) );
 }
 };
- template <> struct hash<vk::DebugUtilsMessengerEXT>
+ template <>
+ struct hash<vk::DebugUtilsMessengerEXT>
 {
- std::size_t operator()(vk::DebugUtilsMessengerEXT const& debugUtilsMessengerEXT) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::DebugUtilsMessengerEXT const & debugUtilsMessengerEXT ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkDebugUtilsMessengerEXT>{}(static_cast<VkDebugUtilsMessengerEXT>(debugUtilsMessengerEXT));
+ return std::hash<VkDebugUtilsMessengerEXT>{}( static_cast<VkDebugUtilsMessengerEXT>( debugUtilsMessengerEXT ) );
 }
 };
 #ifdef VK_ENABLE_BETA_EXTENSIONS
- template <> struct hash<vk::DeferredOperationKHR>
+ template <>
+ struct hash<vk::DeferredOperationKHR>
 {
- std::size_t operator()(vk::DeferredOperationKHR const& deferredOperationKHR) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::DeferredOperationKHR const & deferredOperationKHR ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkDeferredOperationKHR>{}(static_cast<VkDeferredOperationKHR>(deferredOperationKHR));
+ return std::hash<VkDeferredOperationKHR>{}( static_cast<VkDeferredOperationKHR>( deferredOperationKHR ) );
 }
 };
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
- template <> struct hash<vk::DescriptorPool>
+ template <>
+ struct hash<vk::DescriptorPool>
 {
- std::size_t operator()(vk::DescriptorPool const& descriptorPool) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::DescriptorPool const & descriptorPool ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkDescriptorPool>{}(static_cast<VkDescriptorPool>(descriptorPool));
+ return std::hash<VkDescriptorPool>{}( static_cast<VkDescriptorPool>( descriptorPool ) );
 }
 };
- template <> struct hash<vk::DescriptorSet>
+ template <>
+ struct hash<vk::DescriptorSet>
 {
- std::size_t operator()(vk::DescriptorSet const& descriptorSet) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::DescriptorSet const & descriptorSet ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkDescriptorSet>{}(static_cast<VkDescriptorSet>(descriptorSet));
+ return std::hash<VkDescriptorSet>{}( static_cast<VkDescriptorSet>( descriptorSet ) );
 }
 };
- template <> struct hash<vk::DescriptorSetLayout>
+ template <>
+ struct hash<vk::DescriptorSetLayout>
 {
- std::size_t operator()(vk::DescriptorSetLayout const& descriptorSetLayout) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::DescriptorSetLayout const & descriptorSetLayout ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkDescriptorSetLayout>{}(static_cast<VkDescriptorSetLayout>(descriptorSetLayout));
+ return std::hash<VkDescriptorSetLayout>{}( static_cast<VkDescriptorSetLayout>( descriptorSetLayout ) );
 }
 };
- template <> struct hash<vk::DescriptorUpdateTemplate>
+ template <>
+ struct hash<vk::DescriptorUpdateTemplate>
 {
- std::size_t operator()(vk::DescriptorUpdateTemplate const& descriptorUpdateTemplate) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::DescriptorUpdateTemplate const & descriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkDescriptorUpdateTemplate>{}(static_cast<VkDescriptorUpdateTemplate>(descriptorUpdateTemplate));
+ return std::hash<VkDescriptorUpdateTemplate>{}(
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ) );
 }
 };
- template <> struct hash<vk::Device>
+ template <>
+ struct hash<vk::Device>
 {
- std::size_t operator()(vk::Device const& device) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::Device const & device ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkDevice>{}(static_cast<VkDevice>(device));
+ return std::hash<VkDevice>{}( static_cast<VkDevice>( device ) );
 }
 };
- template <> struct hash<vk::DeviceMemory>
+ template <>
+ struct hash<vk::DeviceMemory>
 {
- std::size_t operator()(vk::DeviceMemory const& deviceMemory) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::DeviceMemory const & deviceMemory ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkDeviceMemory>{}(static_cast<VkDeviceMemory>(deviceMemory));
+ return std::hash<VkDeviceMemory>{}( static_cast<VkDeviceMemory>( deviceMemory ) );
 }
 };
- template <> struct hash<vk::DisplayKHR>
+ template <>
+ struct hash<vk::DisplayKHR>
 {
- std::size_t operator()(vk::DisplayKHR const& displayKHR) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::DisplayKHR const & displayKHR ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkDisplayKHR>{}(static_cast<VkDisplayKHR>(displayKHR));
+ return std::hash<VkDisplayKHR>{}( static_cast<VkDisplayKHR>( displayKHR ) );
 }
 };
- template <> struct hash<vk::DisplayModeKHR>
+ template <>
+ struct hash<vk::DisplayModeKHR>
 {
- std::size_t operator()(vk::DisplayModeKHR const& displayModeKHR) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::DisplayModeKHR const & displayModeKHR ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkDisplayModeKHR>{}(static_cast<VkDisplayModeKHR>(displayModeKHR));
+ return std::hash<VkDisplayModeKHR>{}( static_cast<VkDisplayModeKHR>( displayModeKHR ) );
 }
 };
- template <> struct hash<vk::Event>
+ template <>
+ struct hash<vk::Event>
 {
- std::size_t operator()(vk::Event const& event) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::Event const & event ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkEvent>{}(static_cast<VkEvent>(event));
+ return std::hash<VkEvent>{}( static_cast<VkEvent>( event ) );
 }
 };
- template <> struct hash<vk::Fence>
+ template <>
+ struct hash<vk::Fence>
 {
- std::size_t operator()(vk::Fence const& fence) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::Fence const & fence ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkFence>{}(static_cast<VkFence>(fence));
+ return std::hash<VkFence>{}( static_cast<VkFence>( fence ) );
 }
 };
- template <> struct hash<vk::Framebuffer>
+ template <>
+ struct hash<vk::Framebuffer>
 {
- std::size_t operator()(vk::Framebuffer const& framebuffer) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::Framebuffer const & framebuffer ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkFramebuffer>{}(static_cast<VkFramebuffer>(framebuffer));
+ return std::hash<VkFramebuffer>{}( static_cast<VkFramebuffer>( framebuffer ) );
 }
 };
- template <> struct hash<vk::Image>
+ template <>
+ struct hash<vk::Image>
 {
- std::size_t operator()(vk::Image const& image) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::Image const & image ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkImage>{}(static_cast<VkImage>(image));
+ return std::hash<VkImage>{}( static_cast<VkImage>( image ) );
 }
 };
- template <> struct hash<vk::ImageView>
+ template <>
+ struct hash<vk::ImageView>
 {
- std::size_t operator()(vk::ImageView const& imageView) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::ImageView const & imageView ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkImageView>{}(static_cast<VkImageView>(imageView));
+ return std::hash<VkImageView>{}( static_cast<VkImageView>( imageView ) );
 }
 };
- template <> struct hash<vk::IndirectCommandsLayoutNV>
+ template <>
+ struct hash<vk::IndirectCommandsLayoutNV>
 {
- std::size_t operator()(vk::IndirectCommandsLayoutNV const& indirectCommandsLayoutNV) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::IndirectCommandsLayoutNV const & indirectCommandsLayoutNV ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkIndirectCommandsLayoutNV>{}(static_cast<VkIndirectCommandsLayoutNV>(indirectCommandsLayoutNV));
+ return std::hash<VkIndirectCommandsLayoutNV>{}(
+ static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayoutNV ) );
 }
 };
- template <> struct hash<vk::Instance>
+ template <>
+ struct hash<vk::Instance>
 {
- std::size_t operator()(vk::Instance const& instance) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::Instance const & instance ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkInstance>{}(static_cast<VkInstance>(instance));
+ return std::hash<VkInstance>{}( static_cast<VkInstance>( instance ) );
 }
 };
- template <> struct hash<vk::PerformanceConfigurationINTEL>
+ template <>
+ struct hash<vk::PerformanceConfigurationINTEL>
 {
- std::size_t operator()(vk::PerformanceConfigurationINTEL const& performanceConfigurationINTEL) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( vk::PerformanceConfigurationINTEL const & performanceConfigurationINTEL ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkPerformanceConfigurationINTEL>{}(static_cast<VkPerformanceConfigurationINTEL>(performanceConfigurationINTEL));
+ return std::hash<VkPerformanceConfigurationINTEL>{}(
+ static_cast<VkPerformanceConfigurationINTEL>( performanceConfigurationINTEL ) );
 }
 };
- template <> struct hash<vk::PhysicalDevice>
+ template <>
+ struct hash<vk::PhysicalDevice>
 {
- std::size_t operator()(vk::PhysicalDevice const& physicalDevice) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::PhysicalDevice const & physicalDevice ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkPhysicalDevice>{}(static_cast<VkPhysicalDevice>(physicalDevice));
+ return std::hash<VkPhysicalDevice>{}( static_cast<VkPhysicalDevice>( physicalDevice ) );
 }
 };
- template <> struct hash<vk::Pipeline>
+ template <>
+ struct hash<vk::Pipeline>
 {
- std::size_t operator()(vk::Pipeline const& pipeline) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::Pipeline const & pipeline ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkPipeline>{}(static_cast<VkPipeline>(pipeline));
+ return std::hash<VkPipeline>{}( static_cast<VkPipeline>( pipeline ) );
 }
 };
- template <> struct hash<vk::PipelineCache>
+ template <>
+ struct hash<vk::PipelineCache>
 {
- std::size_t operator()(vk::PipelineCache const& pipelineCache) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::PipelineCache const & pipelineCache ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkPipelineCache>{}(static_cast<VkPipelineCache>(pipelineCache));
+ return std::hash<VkPipelineCache>{}( static_cast<VkPipelineCache>( pipelineCache ) );
 }
 };
- template <> struct hash<vk::PipelineLayout>
+ template <>
+ struct hash<vk::PipelineLayout>
 {
- std::size_t operator()(vk::PipelineLayout const& pipelineLayout) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::PipelineLayout const & pipelineLayout ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkPipelineLayout>{}(static_cast<VkPipelineLayout>(pipelineLayout));
+ return std::hash<VkPipelineLayout>{}( static_cast<VkPipelineLayout>( pipelineLayout ) );
 }
 };
- template <> struct hash<vk::QueryPool>
+ template <>
+ struct hash<vk::QueryPool>
 {
- std::size_t operator()(vk::QueryPool const& queryPool) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::QueryPool const & queryPool ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkQueryPool>{}(static_cast<VkQueryPool>(queryPool));
+ return std::hash<VkQueryPool>{}( static_cast<VkQueryPool>( queryPool ) );
 }
 };
- template <> struct hash<vk::Queue>
+ template <>
+ struct hash<vk::Queue>
 {
- std::size_t operator()(vk::Queue const& queue) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::Queue const & queue ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkQueue>{}(static_cast<VkQueue>(queue));
+ return std::hash<VkQueue>{}( static_cast<VkQueue>( queue ) );
 }
 };
- template <> struct hash<vk::RenderPass>
+ template <>
+ struct hash<vk::RenderPass>
 {
- std::size_t operator()(vk::RenderPass const& renderPass) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::RenderPass const & renderPass ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkRenderPass>{}(static_cast<VkRenderPass>(renderPass));
+ return std::hash<VkRenderPass>{}( static_cast<VkRenderPass>( renderPass ) );
 }
 };
- template <> struct hash<vk::Sampler>
+ template <>
+ struct hash<vk::Sampler>
 {
- std::size_t operator()(vk::Sampler const& sampler) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::Sampler const & sampler ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkSampler>{}(static_cast<VkSampler>(sampler));
+ return std::hash<VkSampler>{}( static_cast<VkSampler>( sampler ) );
 }
 };
- template <> struct hash<vk::SamplerYcbcrConversion>
+ template <>
+ struct hash<vk::SamplerYcbcrConversion>
 {
- std::size_t operator()(vk::SamplerYcbcrConversion const& samplerYcbcrConversion) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::SamplerYcbcrConversion const & samplerYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkSamplerYcbcrConversion>{}(static_cast<VkSamplerYcbcrConversion>(samplerYcbcrConversion));
+ return std::hash<VkSamplerYcbcrConversion>{}( static_cast<VkSamplerYcbcrConversion>( samplerYcbcrConversion ) );
 }
 };
- template <> struct hash<vk::Semaphore>
+ template <>
+ struct hash<vk::Semaphore>
 {
- std::size_t operator()(vk::Semaphore const& semaphore) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::Semaphore const & semaphore ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkSemaphore>{}(static_cast<VkSemaphore>(semaphore));
+ return std::hash<VkSemaphore>{}( static_cast<VkSemaphore>( semaphore ) );
 }
 };
- template <> struct hash<vk::ShaderModule>
+ template <>
+ struct hash<vk::ShaderModule>
 {
- std::size_t operator()(vk::ShaderModule const& shaderModule) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::ShaderModule const & shaderModule ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkShaderModule>{}(static_cast<VkShaderModule>(shaderModule));
+ return std::hash<VkShaderModule>{}( static_cast<VkShaderModule>( shaderModule ) );
 }
 };
- template <> struct hash<vk::SurfaceKHR>
+ template <>
+ struct hash<vk::SurfaceKHR>
 {
- std::size_t operator()(vk::SurfaceKHR const& surfaceKHR) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::SurfaceKHR const & surfaceKHR ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkSurfaceKHR>{}(static_cast<VkSurfaceKHR>(surfaceKHR));
+ return std::hash<VkSurfaceKHR>{}( static_cast<VkSurfaceKHR>( surfaceKHR ) );
 }
 };
- template <> struct hash<vk::SwapchainKHR>
+ template <>
+ struct hash<vk::SwapchainKHR>
 {
- std::size_t operator()(vk::SwapchainKHR const& swapchainKHR) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::SwapchainKHR const & swapchainKHR ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkSwapchainKHR>{}(static_cast<VkSwapchainKHR>(swapchainKHR));
+ return std::hash<VkSwapchainKHR>{}( static_cast<VkSwapchainKHR>( swapchainKHR ) );
 }
 };
- template <> struct hash<vk::ValidationCacheEXT>
+ template <>
+ struct hash<vk::ValidationCacheEXT>
 {
- std::size_t operator()(vk::ValidationCacheEXT const& validationCacheEXT) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( vk::ValidationCacheEXT const & validationCacheEXT ) const VULKAN_HPP_NOEXCEPT
 {
- return std::hash<VkValidationCacheEXT>{}(static_cast<VkValidationCacheEXT>(validationCacheEXT));
+ return std::hash<VkValidationCacheEXT>{}( static_cast<VkValidationCacheEXT>( validationCacheEXT ) );
 }
 };
-}
+}  // namespace std
 #endif