23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 475 #include <vulkan/vulkan.h> 477 VK_DEFINE_HANDLE(VmaAllocator)
481 VmaAllocator allocator,
483 VkDeviceMemory memory,
487 VmaAllocator allocator,
489 VkDeviceMemory memory,
641 VmaAllocator* pAllocator);
645 VmaAllocator allocator);
652 VmaAllocator allocator,
653 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
660 VmaAllocator allocator,
661 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
670 VmaAllocator allocator,
671 uint32_t memoryTypeIndex,
672 VkMemoryPropertyFlags* pFlags);
683 VmaAllocator allocator,
684 uint32_t frameIndex);
714 VmaAllocator allocator,
717 #define VMA_STATS_STRING_ENABLED 1 719 #if VMA_STATS_STRING_ENABLED 725 VmaAllocator allocator,
726 char** ppStatsString,
727 VkBool32 detailedMap);
730 VmaAllocator allocator,
733 #endif // #if VMA_STATS_STRING_ENABLED 735 VK_DEFINE_HANDLE(VmaPool)
864 VmaAllocator allocator,
865 uint32_t memoryTypeBits,
867 uint32_t* pMemoryTypeIndex);
977 VmaAllocator allocator,
984 VmaAllocator allocator,
994 VmaAllocator allocator,
1005 VmaAllocator allocator,
1007 size_t* pLostAllocationCount);
1009 VK_DEFINE_HANDLE(VmaAllocation)
1062 VmaAllocator allocator,
1063 const VkMemoryRequirements* pVkMemoryRequirements,
1065 VmaAllocation* pAllocation,
1075 VmaAllocator allocator,
1078 VmaAllocation* pAllocation,
1083 VmaAllocator allocator,
1086 VmaAllocation* pAllocation,
1091 VmaAllocator allocator,
1092 VmaAllocation allocation);
1096 VmaAllocator allocator,
1097 VmaAllocation allocation,
1102 VmaAllocator allocator,
1103 VmaAllocation allocation,
1117 VmaAllocator allocator,
1118 VmaAllocation* pAllocation);
1129 VmaAllocator allocator,
1130 VmaAllocation allocation,
1134 VmaAllocator allocator,
1135 VmaAllocation allocation);
1266 VmaAllocator allocator,
1267 VmaAllocation* pAllocations,
1268 size_t allocationCount,
1269 VkBool32* pAllocationsChanged,
1292 VmaAllocator allocator,
1293 const VkBufferCreateInfo* pBufferCreateInfo,
1296 VmaAllocation* pAllocation,
1308 VmaAllocator allocator,
1310 VmaAllocation allocation);
1314 VmaAllocator allocator,
1315 const VkImageCreateInfo* pImageCreateInfo,
1318 VmaAllocation* pAllocation,
1330 VmaAllocator allocator,
1332 VmaAllocation allocation);
1338 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1341 #ifdef __INTELLISENSE__ 1342 #define VMA_IMPLEMENTATION 1345 #ifdef VMA_IMPLEMENTATION 1346 #undef VMA_IMPLEMENTATION 1368 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1369 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1381 #if VMA_USE_STL_CONTAINERS 1382 #define VMA_USE_STL_VECTOR 1 1383 #define VMA_USE_STL_UNORDERED_MAP 1 1384 #define VMA_USE_STL_LIST 1 1387 #if VMA_USE_STL_VECTOR 1391 #if VMA_USE_STL_UNORDERED_MAP 1392 #include <unordered_map> 1395 #if VMA_USE_STL_LIST 1404 #include <algorithm> 1408 #if !defined(_WIN32) 1415 #define VMA_ASSERT(expr) assert(expr) 1417 #define VMA_ASSERT(expr) 1423 #ifndef VMA_HEAVY_ASSERT 1425 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1427 #define VMA_HEAVY_ASSERT(expr) 1433 #define VMA_NULL nullptr 1436 #ifndef VMA_ALIGN_OF 1437 #define VMA_ALIGN_OF(type) (__alignof(type)) 1440 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1442 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1444 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1448 #ifndef VMA_SYSTEM_FREE 1450 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1452 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1457 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1461 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1465 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1469 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1472 #ifndef VMA_DEBUG_LOG 1473 #define VMA_DEBUG_LOG(format, ...) 1483 #if VMA_STATS_STRING_ENABLED 1484 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1486 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
1488 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
1490 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
1492 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
1494 snprintf(outStr, strLen,
"%p", ptr);
1504 void Lock() { m_Mutex.lock(); }
1505 void Unlock() { m_Mutex.unlock(); }
1509 #define VMA_MUTEX VmaMutex 1520 #ifndef VMA_ATOMIC_UINT32 1521 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1524 #ifndef VMA_BEST_FIT 1537 #define VMA_BEST_FIT (1) 1540 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1545 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1548 #ifndef VMA_DEBUG_ALIGNMENT 1553 #define VMA_DEBUG_ALIGNMENT (1) 1556 #ifndef VMA_DEBUG_MARGIN 1561 #define VMA_DEBUG_MARGIN (0) 1564 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1569 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1572 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1577 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1580 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1581 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1585 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1586 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1590 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1591 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1595 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// All-null VkAllocationCallbacks instance. Used as a stand-in when the client
// supplies no custom callbacks, so internal code can always hold a valid struct.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in 'v' (population count),
// computed with the classic parallel bit-summing technique.
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    // Restored: the function is non-void; falling off the end is UB.
    return c;
}
// Rounds 'val' up to the nearest multiple of 'align'.
// Works for any positive 'align' (power of two not required).
template<typename T>
static inline T VmaAlignUp(T val, T align)
{
    return ((val + align - 1) / align) * align;
}
// Integer division of x by y, rounded to nearest (ties round up for
// non-negative operands): adds half the divisor before dividing.
template<typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Partition step for VmaQuickSort (Lomuto scheme, pivot = last element):
// elements for which cmp(elem, pivot) holds end up before the pivot.
// Returns an iterator at the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot is the last element
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            // Restored: advance the insertion point after placing an element
            // that belongs before the pivot; without this the partition is wrong.
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    // Restored: callers need the pivot's final position.
    return insertIndex;
}
1655 template<
typename Iterator,
typename Compare>
1656 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1660 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1661 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1662 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1666 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1668 #endif // #ifndef VMA_SORT 1677 static inline bool VmaBlocksOnSamePage(
1678 VkDeviceSize resourceAOffset,
1679 VkDeviceSize resourceASize,
1680 VkDeviceSize resourceBOffset,
1681 VkDeviceSize pageSize)
1683 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1684 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1685 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1686 VkDeviceSize resourceBStart = resourceBOffset;
1687 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1688 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation. Linear resources (buffers,
// linear images) and optimal-tiling images may need to be kept on separate
// pages — see VmaIsBufferImageGranularityConflict.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
1708 static inline bool VmaIsBufferImageGranularityConflict(
1709 VmaSuballocationType suballocType1,
1710 VmaSuballocationType suballocType2)
1712 if(suballocType1 > suballocType2)
1714 VMA_SWAP(suballocType1, suballocType2);
1717 switch(suballocType1)
1719 case VMA_SUBALLOCATION_TYPE_FREE:
1721 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1723 case VMA_SUBALLOCATION_TYPE_BUFFER:
1725 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1726 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1727 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1729 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1730 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1731 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1732 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1734 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1735 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1747 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1748 m_pMutex(useMutex ? &mutex : VMA_NULL)
1765 VMA_MUTEX* m_pMutex;
1768 #if VMA_DEBUG_GLOBAL_MUTEX 1769 static VMA_MUTEX gDebugGlobalMutex;
1770 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1772 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1776 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over sorted range [beg, end): returns an iterator to the
// first element for which cmp(elem, key) is false (i.e. first element not
// less than key), or 'end' if no such element exists (lower_bound semantics).
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    // Restored loop: halve [down, up) until it collapses to the answer.
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    // Restored: report the found position.
    return beg + down;
}
1809 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1811 if((pAllocationCallbacks != VMA_NULL) &&
1812 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1814 return (*pAllocationCallbacks->pfnAllocation)(
1815 pAllocationCallbacks->pUserData,
1818 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1822 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1826 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1828 if((pAllocationCallbacks != VMA_NULL) &&
1829 (pAllocationCallbacks->pfnFree != VMA_NULL))
1831 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1835 VMA_SYSTEM_FREE(ptr);
1839 template<
typename T>
1840 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1842 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1845 template<
typename T>
1846 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1848 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1851 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1853 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1855 template<
typename T>
1856 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1859 VmaFree(pAllocationCallbacks, ptr);
1862 template<
typename T>
1863 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1867 for(
size_t i = count; i--; )
1871 VmaFree(pAllocationCallbacks, ptr);
1876 template<
typename T>
1877 class VmaStlAllocator
1880 const VkAllocationCallbacks*
const m_pCallbacks;
1881 typedef T value_type;
1883 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1884 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1886 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1887 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1889 template<
typename U>
1890 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1892 return m_pCallbacks == rhs.m_pCallbacks;
1894 template<
typename U>
1895 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1897 return m_pCallbacks != rhs.m_pCallbacks;
1900 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1903 #if VMA_USE_STL_VECTOR 1905 #define VmaVector std::vector 1907 template<
typename T,
typename allocatorT>
1908 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1910 vec.insert(vec.begin() + index, item);
1913 template<
typename T,
typename allocatorT>
1914 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
1916 vec.erase(vec.begin() + index);
1919 #else // #if VMA_USE_STL_VECTOR 1924 template<
typename T,
typename AllocatorT>
1928 typedef T value_type;
1930 VmaVector(
const AllocatorT& allocator) :
1931 m_Allocator(allocator),
1938 VmaVector(
size_t count,
const AllocatorT& allocator) :
1939 m_Allocator(allocator),
1940 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1946 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1947 m_Allocator(src.m_Allocator),
1948 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1949 m_Count(src.m_Count),
1950 m_Capacity(src.m_Count)
1954 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1960 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1963 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1967 resize(rhs.m_Count);
1970 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1976 bool empty()
const {
return m_Count == 0; }
1977 size_t size()
const {
return m_Count; }
1978 T* data() {
return m_pArray; }
1979 const T* data()
const {
return m_pArray; }
1981 T& operator[](
size_t index)
1983 VMA_HEAVY_ASSERT(index < m_Count);
1984 return m_pArray[index];
1986 const T& operator[](
size_t index)
const 1988 VMA_HEAVY_ASSERT(index < m_Count);
1989 return m_pArray[index];
1994 VMA_HEAVY_ASSERT(m_Count > 0);
1997 const T& front()
const 1999 VMA_HEAVY_ASSERT(m_Count > 0);
2004 VMA_HEAVY_ASSERT(m_Count > 0);
2005 return m_pArray[m_Count - 1];
2007 const T& back()
const 2009 VMA_HEAVY_ASSERT(m_Count > 0);
2010 return m_pArray[m_Count - 1];
2013 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2015 newCapacity = VMA_MAX(newCapacity, m_Count);
2017 if((newCapacity < m_Capacity) && !freeMemory)
2019 newCapacity = m_Capacity;
2022 if(newCapacity != m_Capacity)
2024 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2027 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2029 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2030 m_Capacity = newCapacity;
2031 m_pArray = newArray;
2035 void resize(
size_t newCount,
bool freeMemory =
false)
2037 size_t newCapacity = m_Capacity;
2038 if(newCount > m_Capacity)
2040 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2044 newCapacity = newCount;
2047 if(newCapacity != m_Capacity)
2049 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2050 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2051 if(elementsToCopy != 0)
2053 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2055 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2056 m_Capacity = newCapacity;
2057 m_pArray = newArray;
2063 void clear(
bool freeMemory =
false)
2065 resize(0, freeMemory);
2068 void insert(
size_t index,
const T& src)
2070 VMA_HEAVY_ASSERT(index <= m_Count);
2071 const size_t oldCount = size();
2072 resize(oldCount + 1);
2073 if(index < oldCount)
2075 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2077 m_pArray[index] = src;
2080 void remove(
size_t index)
2082 VMA_HEAVY_ASSERT(index < m_Count);
2083 const size_t oldCount = size();
2084 if(index < oldCount - 1)
2086 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2088 resize(oldCount - 1);
2091 void push_back(
const T& src)
2093 const size_t newIndex = size();
2094 resize(newIndex + 1);
2095 m_pArray[newIndex] = src;
2100 VMA_HEAVY_ASSERT(m_Count > 0);
2104 void push_front(
const T& src)
2111 VMA_HEAVY_ASSERT(m_Count > 0);
2115 typedef T* iterator;
2117 iterator begin() {
return m_pArray; }
2118 iterator end() {
return m_pArray + m_Count; }
2121 AllocatorT m_Allocator;
2127 template<
typename T,
typename allocatorT>
2128 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2130 vec.insert(index, item);
2133 template<
typename T,
typename allocatorT>
2134 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2139 #endif // #if VMA_USE_STL_VECTOR 2141 template<
typename CmpLess,
typename VectorT>
2142 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2144 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2146 vector.data() + vector.size(),
2148 CmpLess()) - vector.data();
2149 VmaVectorInsert(vector, indexToInsert, value);
2150 return indexToInsert;
// Removes one element equal to 'value' (equality derived from CmpLess) from
// the sorted 'vector'. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),  // restored: range begin
        vector.end(),    // restored: range end
        value,           // restored: search key
        comparator);
    // Equal iff neither compares less than the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;  // restored result
    }
    return false;     // restored result
}
// Finds an element equal to 'value' in the sorted 'vector'; returns its index
// or vector.size() when not found.
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    // Use data() pointers so this works on a const vector; the garbled source
    // compared an iterator against vector.size(), which cannot compile —
    // compare against the end pointer instead.
    const typename VectorT::value_type* it = VmaBinaryFindFirstNotLess(
        vector.data(),                 // restored: range begin
        vector.data() + vector.size(),
        value,                         // restored: search key
        comparator);
    if((it != vector.data() + vector.size()) &&
        !comparator(*it, value) && !comparator(value, *it))
    {
        return it - vector.data();
    }
    return vector.size();
}
2198 template<
typename T>
2199 class VmaPoolAllocator
2202 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2203 ~VmaPoolAllocator();
2211 uint32_t NextFreeIndex;
2218 uint32_t FirstFreeIndex;
2221 const VkAllocationCallbacks* m_pAllocationCallbacks;
2222 size_t m_ItemsPerBlock;
2223 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2225 ItemBlock& CreateNewBlock();
2228 template<
typename T>
2229 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2230 m_pAllocationCallbacks(pAllocationCallbacks),
2231 m_ItemsPerBlock(itemsPerBlock),
2232 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2234 VMA_ASSERT(itemsPerBlock > 0);
2237 template<
typename T>
2238 VmaPoolAllocator<T>::~VmaPoolAllocator()
2243 template<
typename T>
2244 void VmaPoolAllocator<T>::Clear()
2246 for(
size_t i = m_ItemBlocks.size(); i--; )
2247 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2248 m_ItemBlocks.clear();
2251 template<
typename T>
2252 T* VmaPoolAllocator<T>::Alloc()
2254 for(
size_t i = m_ItemBlocks.size(); i--; )
2256 ItemBlock& block = m_ItemBlocks[i];
2258 if(block.FirstFreeIndex != UINT32_MAX)
2260 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2261 block.FirstFreeIndex = pItem->NextFreeIndex;
2262 return &pItem->Value;
2267 ItemBlock& newBlock = CreateNewBlock();
2268 Item*
const pItem = &newBlock.pItems[0];
2269 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2270 return &pItem->Value;
2273 template<
typename T>
2274 void VmaPoolAllocator<T>::Free(T* ptr)
2277 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2279 ItemBlock& block = m_ItemBlocks[i];
2283 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2286 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2288 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2289 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2290 block.FirstFreeIndex = index;
2294 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2297 template<
typename T>
2298 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2300 ItemBlock newBlock = {
2301 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2303 m_ItemBlocks.push_back(newBlock);
2306 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2307 newBlock.pItems[i].NextFreeIndex = i + 1;
2308 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2309 return m_ItemBlocks.back();
2315 #if VMA_USE_STL_LIST 2317 #define VmaList std::list 2319 #else // #if VMA_USE_STL_LIST 2321 template<
typename T>
2330 template<
typename T>
2334 typedef VmaListItem<T> ItemType;
2336 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2340 size_t GetCount()
const {
return m_Count; }
2341 bool IsEmpty()
const {
return m_Count == 0; }
2343 ItemType* Front() {
return m_pFront; }
2344 const ItemType* Front()
const {
return m_pFront; }
2345 ItemType* Back() {
return m_pBack; }
2346 const ItemType* Back()
const {
return m_pBack; }
2348 ItemType* PushBack();
2349 ItemType* PushFront();
2350 ItemType* PushBack(
const T& value);
2351 ItemType* PushFront(
const T& value);
2356 ItemType* InsertBefore(ItemType* pItem);
2358 ItemType* InsertAfter(ItemType* pItem);
2360 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2361 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2363 void Remove(ItemType* pItem);
2366 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2367 VmaPoolAllocator<ItemType> m_ItemAllocator;
2373 VmaRawList(
const VmaRawList<T>& src);
2374 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2377 template<
typename T>
2378 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2379 m_pAllocationCallbacks(pAllocationCallbacks),
2380 m_ItemAllocator(pAllocationCallbacks, 128),
2387 template<
typename T>
2388 VmaRawList<T>::~VmaRawList()
2394 template<
typename T>
2395 void VmaRawList<T>::Clear()
2397 if(IsEmpty() ==
false)
2399 ItemType* pItem = m_pBack;
2400 while(pItem != VMA_NULL)
2402 ItemType*
const pPrevItem = pItem->pPrev;
2403 m_ItemAllocator.Free(pItem);
2406 m_pFront = VMA_NULL;
2412 template<
typename T>
2413 VmaListItem<T>* VmaRawList<T>::PushBack()
2415 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2416 pNewItem->pNext = VMA_NULL;
2419 pNewItem->pPrev = VMA_NULL;
2420 m_pFront = pNewItem;
2426 pNewItem->pPrev = m_pBack;
2427 m_pBack->pNext = pNewItem;
2434 template<
typename T>
2435 VmaListItem<T>* VmaRawList<T>::PushFront()
2437 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2438 pNewItem->pPrev = VMA_NULL;
2441 pNewItem->pNext = VMA_NULL;
2442 m_pFront = pNewItem;
2448 pNewItem->pNext = m_pFront;
2449 m_pFront->pPrev = pNewItem;
2450 m_pFront = pNewItem;
2456 template<
typename T>
2457 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2459 ItemType*
const pNewItem = PushBack();
2460 pNewItem->Value = value;
2464 template<
typename T>
2465 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2467 ItemType*
const pNewItem = PushFront();
2468 pNewItem->Value = value;
2472 template<
typename T>
2473 void VmaRawList<T>::PopBack()
2475 VMA_HEAVY_ASSERT(m_Count > 0);
2476 ItemType*
const pBackItem = m_pBack;
2477 ItemType*
const pPrevItem = pBackItem->pPrev;
2478 if(pPrevItem != VMA_NULL)
2480 pPrevItem->pNext = VMA_NULL;
2482 m_pBack = pPrevItem;
2483 m_ItemAllocator.Free(pBackItem);
2487 template<
typename T>
2488 void VmaRawList<T>::PopFront()
2490 VMA_HEAVY_ASSERT(m_Count > 0);
2491 ItemType*
const pFrontItem = m_pFront;
2492 ItemType*
const pNextItem = pFrontItem->pNext;
2493 if(pNextItem != VMA_NULL)
2495 pNextItem->pPrev = VMA_NULL;
2497 m_pFront = pNextItem;
2498 m_ItemAllocator.Free(pFrontItem);
2502 template<
typename T>
2503 void VmaRawList<T>::Remove(ItemType* pItem)
2505 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2506 VMA_HEAVY_ASSERT(m_Count > 0);
2508 if(pItem->pPrev != VMA_NULL)
2510 pItem->pPrev->pNext = pItem->pNext;
2514 VMA_HEAVY_ASSERT(m_pFront == pItem);
2515 m_pFront = pItem->pNext;
2518 if(pItem->pNext != VMA_NULL)
2520 pItem->pNext->pPrev = pItem->pPrev;
2524 VMA_HEAVY_ASSERT(m_pBack == pItem);
2525 m_pBack = pItem->pPrev;
2528 m_ItemAllocator.Free(pItem);
2532 template<
typename T>
2533 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2535 if(pItem != VMA_NULL)
2537 ItemType*
const prevItem = pItem->pPrev;
2538 ItemType*
const newItem = m_ItemAllocator.Alloc();
2539 newItem->pPrev = prevItem;
2540 newItem->pNext = pItem;
2541 pItem->pPrev = newItem;
2542 if(prevItem != VMA_NULL)
2544 prevItem->pNext = newItem;
2548 VMA_HEAVY_ASSERT(m_pFront == pItem);
2558 template<
typename T>
2559 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2561 if(pItem != VMA_NULL)
2563 ItemType*
const nextItem = pItem->pNext;
2564 ItemType*
const newItem = m_ItemAllocator.Alloc();
2565 newItem->pNext = nextItem;
2566 newItem->pPrev = pItem;
2567 pItem->pNext = newItem;
2568 if(nextItem != VMA_NULL)
2570 nextItem->pPrev = newItem;
2574 VMA_HEAVY_ASSERT(m_pBack == pItem);
2584 template<
typename T>
2585 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2587 ItemType*
const newItem = InsertBefore(pItem);
2588 newItem->Value = value;
2592 template<
typename T>
2593 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2595 ItemType*
const newItem = InsertAfter(pItem);
2596 newItem->Value = value;
2600 template<
typename T,
typename AllocatorT>
2613 T& operator*()
const 2615 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2616 return m_pItem->Value;
2618 T* operator->()
const 2620 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2621 return &m_pItem->Value;
2624 iterator& operator++()
2626 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2627 m_pItem = m_pItem->pNext;
2630 iterator& operator--()
2632 if(m_pItem != VMA_NULL)
2634 m_pItem = m_pItem->pPrev;
2638 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2639 m_pItem = m_pList->Back();
2644 iterator operator++(
int)
2646 iterator result = *
this;
2650 iterator operator--(
int)
2652 iterator result = *
this;
2657 bool operator==(
const iterator& rhs)
const 2659 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2660 return m_pItem == rhs.m_pItem;
2662 bool operator!=(
const iterator& rhs)
const 2664 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2665 return m_pItem != rhs.m_pItem;
2669 VmaRawList<T>* m_pList;
2670 VmaListItem<T>* m_pItem;
2672 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2678 friend class VmaList<T, AllocatorT>;
2681 class const_iterator
2690 const_iterator(
const iterator& src) :
2691 m_pList(src.m_pList),
2692 m_pItem(src.m_pItem)
2696 const T& operator*()
const 2698 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2699 return m_pItem->Value;
2701 const T* operator->()
const 2703 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2704 return &m_pItem->Value;
2707 const_iterator& operator++()
2709 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2710 m_pItem = m_pItem->pNext;
2713 const_iterator& operator--()
2715 if(m_pItem != VMA_NULL)
2717 m_pItem = m_pItem->pPrev;
2721 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2722 m_pItem = m_pList->Back();
2727 const_iterator operator++(
int)
2729 const_iterator result = *
this;
2733 const_iterator operator--(
int)
2735 const_iterator result = *
this;
2740 bool operator==(
const const_iterator& rhs)
const 2742 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2743 return m_pItem == rhs.m_pItem;
2745 bool operator!=(
const const_iterator& rhs)
const 2747 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2748 return m_pItem != rhs.m_pItem;
2752 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2758 const VmaRawList<T>* m_pList;
2759 const VmaListItem<T>* m_pItem;
2761 friend class VmaList<T, AllocatorT>;
2764 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2766 bool empty()
const {
return m_RawList.IsEmpty(); }
2767 size_t size()
const {
return m_RawList.GetCount(); }
2769 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2770 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2772 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2773 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2775 void clear() { m_RawList.Clear(); }
2776 void push_back(
const T& value) { m_RawList.PushBack(value); }
2777 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2778 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2781 VmaRawList<T> m_RawList;
2784 #endif // #if VMA_USE_STL_LIST 2792 #if VMA_USE_STL_UNORDERED_MAP 2794 #define VmaPair std::pair 2796 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2797 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2799 #else // #if VMA_USE_STL_UNORDERED_MAP 2801 template<
typename T1,
typename T2>
2807 VmaPair() : first(), second() { }
2808 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2814 template<
typename KeyT,
typename ValueT>
2818 typedef VmaPair<KeyT, ValueT> PairType;
2819 typedef PairType* iterator;
2821 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2823 iterator begin() {
return m_Vector.begin(); }
2824 iterator end() {
return m_Vector.end(); }
2826 void insert(
const PairType& pair);
2827 iterator find(
const KeyT& key);
2828 void erase(iterator it);
2831 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2834 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2836 template<
typename FirstT,
typename SecondT>
2837 struct VmaPairFirstLess
2839 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2841 return lhs.first < rhs.first;
2843 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2845 return lhs.first < rhsFirst;
2849 template<
typename KeyT,
typename ValueT>
2850 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2852 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2854 m_Vector.data() + m_Vector.size(),
2856 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2857 VmaVectorInsert(m_Vector, indexToInsert, pair);
2860 template<
typename KeyT,
typename ValueT>
2861 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2863 PairType* it = VmaBinaryFindFirstNotLess(
2865 m_Vector.data() + m_Vector.size(),
2867 VmaPairFirstLess<KeyT, ValueT>());
2868 if((it != m_Vector.end()) && (it->first == key))
2874 return m_Vector.end();
2878 template<
typename KeyT,
typename ValueT>
2879 void VmaMap<KeyT, ValueT>::erase(iterator it)
2881 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2884 #endif // #if VMA_USE_STL_UNORDERED_MAP 2890 class VmaDeviceMemoryBlock;
2892 enum VMA_BLOCK_VECTOR_TYPE
2894 VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
2895 VMA_BLOCK_VECTOR_TYPE_MAPPED,
2896 VMA_BLOCK_VECTOR_TYPE_COUNT
2902 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2903 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2906 struct VmaAllocation_T
2909 enum ALLOCATION_TYPE
2911 ALLOCATION_TYPE_NONE,
2912 ALLOCATION_TYPE_BLOCK,
2913 ALLOCATION_TYPE_DEDICATED,
2916 VmaAllocation_T(uint32_t currentFrameIndex) :
2919 m_pUserData(VMA_NULL),
2920 m_Type(ALLOCATION_TYPE_NONE),
2921 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2922 m_LastUseFrameIndex(currentFrameIndex)
2926 void InitBlockAllocation(
2928 VmaDeviceMemoryBlock* block,
2929 VkDeviceSize offset,
2930 VkDeviceSize alignment,
2932 VmaSuballocationType suballocationType,
2936 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2937 VMA_ASSERT(block != VMA_NULL);
2938 m_Type = ALLOCATION_TYPE_BLOCK;
2939 m_Alignment = alignment;
2941 m_pUserData = pUserData;
2942 m_SuballocationType = suballocationType;
2943 m_BlockAllocation.m_hPool = hPool;
2944 m_BlockAllocation.m_Block = block;
2945 m_BlockAllocation.m_Offset = offset;
2946 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
2951 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2952 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2953 m_Type = ALLOCATION_TYPE_BLOCK;
2954 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2955 m_BlockAllocation.m_Block = VMA_NULL;
2956 m_BlockAllocation.m_Offset = 0;
2957 m_BlockAllocation.m_CanBecomeLost =
true;
2960 void ChangeBlockAllocation(
2961 VmaDeviceMemoryBlock* block,
2962 VkDeviceSize offset)
2964 VMA_ASSERT(block != VMA_NULL);
2965 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2966 m_BlockAllocation.m_Block = block;
2967 m_BlockAllocation.m_Offset = offset;
2970 void InitDedicatedAllocation(
2971 uint32_t memoryTypeIndex,
2972 VkDeviceMemory hMemory,
2973 VmaSuballocationType suballocationType,
2979 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2980 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2981 m_Type = ALLOCATION_TYPE_DEDICATED;
2984 m_pUserData = pUserData;
2985 m_SuballocationType = suballocationType;
2986 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2987 m_DedicatedAllocation.m_hMemory = hMemory;
2988 m_DedicatedAllocation.m_PersistentMap = persistentMap;
2989 m_DedicatedAllocation.m_pMappedData = pMappedData;
// ---- Simple accessors for VmaAllocation_T state ----
2992 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2993 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2994 VkDeviceSize GetSize()
const {
return m_Size; }
2995 void* GetUserData()
const {
return m_pUserData; }
// User data is an opaque pointer owned by the client; no lifetime is managed here.
2996 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2997 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
// Block accessor is only meaningful for BLOCK-type allocations (asserted).
2999 VmaDeviceMemoryBlock* GetBlock()
const 3001 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3002 return m_BlockAllocation.m_Block;
// The following accessors are defined out-of-class later in this file and
// dispatch on m_Type (block vs. dedicated).
3004 VkDeviceSize GetOffset()
const;
3005 VkDeviceMemory GetMemory()
const;
3006 uint32_t GetMemoryTypeIndex()
const;
3007 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
3008 void* GetMappedData()
const;
3009 bool CanBecomeLost()
const;
3010 VmaPool GetPool()
const;
3012 VkResult DedicatedAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator);
3013 void DedicatedAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator);
// Atomic frame-index access: used by the lost-allocation mechanism to decide
// whether this allocation was used recently enough to survive.
3015 uint32_t GetLastUseFrameIndex()
const 3017 return m_LastUseFrameIndex.load();
// compare_exchange_weak may fail spuriously; callers loop as needed.
3019 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3021 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
// Attempts to atomically mark this allocation lost; defined later in the file.
3031 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Adds this dedicated allocation's numbers to a VmaStatInfo (dedicated only).
3033 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3035 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// ---- Data members ----
3047 VkDeviceSize m_Alignment;
3048 VkDeviceSize m_Size;
3050 ALLOCATION_TYPE m_Type;
3051 VmaSuballocationType m_SuballocationType;
// Atomic so lost-allocation checks can race with normal use.
3052 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
// State specific to an allocation placed inside a VmaDeviceMemoryBlock.
3055 struct BlockAllocation
3058 VmaDeviceMemoryBlock* m_Block;
3059 VkDeviceSize m_Offset;
3060 bool m_CanBecomeLost;
// State specific to an allocation backed by its own VkDeviceMemory.
3064 struct DedicatedAllocation
3066 uint32_t m_MemoryTypeIndex;
3067 VkDeviceMemory m_hMemory;
3068 bool m_PersistentMap;
3069 void* m_pMappedData;
// Exactly one of these is active, selected by m_Type.
3075 BlockAllocation m_BlockAllocation;
3077 DedicatedAllocation m_DedicatedAllocation;
// One contiguous region inside a device memory block: either free
// (hAllocation == VK_NULL_HANDLE, type FREE) or owned by an allocation.
3085 struct VmaSuballocation
3087 VkDeviceSize offset;
3089 VmaAllocation hAllocation;
3090 VmaSuballocationType type;
3093 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost charged per allocation that would have to be made lost; used by
// VmaAllocationRequest::CalcCost to prefer requests that sacrifice fewer
// live allocations.
3096 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside a block, produced by
// VmaBlockMetadata::CreateAllocationRequest.
3111 struct VmaAllocationRequest
3113 VkDeviceSize offset;
3114 VkDeviceSize sumFreeSize;
3115 VkDeviceSize sumItemSize;
3116 VmaSuballocationList::iterator item;
3117 size_t itemsToMakeLostCount;
// Lower cost = better request: bytes of live allocations destroyed plus a
// fixed penalty per allocation made lost.
3119 VkDeviceSize CalcCost()
const 3121 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for the suballocations inside a single VkDeviceMemory block:
// a list of used/free ranges plus a size-sorted index of free ranges.
3129 class VmaBlockMetadata
3132 VmaBlockMetadata(VmaAllocator hAllocator);
3133 ~VmaBlockMetadata();
// Must be called after construction; sets up one all-free suballocation.
3134 void Init(VkDeviceSize size);
// Consistency check of all internal invariants (offsets, free counts, index).
3137 bool Validate()
const;
3138 VkDeviceSize GetSize()
const {
return m_Size; }
3139 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3140 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3141 VkDeviceSize GetUnusedRangeSizeMax()
const;
// True when the block contains a single free suballocation and nothing else.
3143 bool IsEmpty()
const;
3145 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3148 #if VMA_STATS_STRING_ENABLED 3149 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Fast path for an empty block: request covering offset 0.
3153 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Searches for a place for a new allocation, optionally planning to make
// existing lost-able allocations lost. Returns false if nothing fits.
3158 bool CreateAllocationRequest(
3159 uint32_t currentFrameIndex,
3160 uint32_t frameInUseCount,
3161 VkDeviceSize bufferImageGranularity,
3162 VkDeviceSize allocSize,
3163 VkDeviceSize allocAlignment,
3164 VmaSuballocationType allocType,
3165 bool canMakeOtherLost,
3166 VmaAllocationRequest* pAllocationRequest);
// Actually makes lost the allocations a request planned to sacrifice.
3168 bool MakeRequestedAllocationsLost(
3169 uint32_t currentFrameIndex,
3170 uint32_t frameInUseCount,
3171 VmaAllocationRequest* pAllocationRequest);
3173 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Commits a request: carves the allocation out of the chosen free range.
3177 const VmaAllocationRequest& request,
3178 VmaSuballocationType type,
3179 VkDeviceSize allocSize,
3180 VmaAllocation hAllocation);
3183 void Free(
const VmaAllocation allocation);
// ---- Data members ----
3186 VkDeviceSize m_Size;
3187 uint32_t m_FreeCount;
3188 VkDeviceSize m_SumFreeSize;
3189 VmaSuballocationList m_Suballocations;
// Free suballocations above a registration threshold, sorted by size
// ascending, enabling binary search for a best-fit range.
3192 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3194 bool ValidateFreeSuballocationList()
const;
// Checks whether an allocation fits at/after suballocItem, honoring alignment
// and bufferImageGranularity; outputs offset and the cost of lost allocations.
3198 bool CheckAllocation(
3199 uint32_t currentFrameIndex,
3200 uint32_t frameInUseCount,
3201 VkDeviceSize bufferImageGranularity,
3202 VkDeviceSize allocSize,
3203 VkDeviceSize allocAlignment,
3204 VmaSuballocationType allocType,
3205 VmaSuballocationList::const_iterator suballocItem,
3206 bool canMakeOtherLost,
3207 VkDeviceSize* pOffset,
3208 size_t* itemsToMakeLostCount,
3209 VkDeviceSize* pSumFreeSize,
3210 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
3212 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3216 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3219 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3222 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// One VkDeviceMemory allocation that is subdivided among many VmaAllocations;
// owns the memory handle, optional persistent mapping, and the metadata.
3231 class VmaDeviceMemoryBlock
3234 uint32_t m_MemoryTypeIndex;
3235 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3236 VkDeviceMemory m_hMemory;
3237 bool m_PersistentMap;
3238 void* m_pMappedData;
3239 VmaBlockMetadata m_Metadata;
3241 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destroy() must have been called first; the handle must already be released.
3243 ~VmaDeviceMemoryBlock()
3245 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init takes ownership of newMemory (Vulkan handle allocated by the caller).
3250 uint32_t newMemoryTypeIndex,
3251 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3252 VkDeviceMemory newMemory,
3253 VkDeviceSize newSize,
3257 void Destroy(VmaAllocator allocator);
3260 bool Validate()
const;
// Comparator ordering raw pointers by address, for sorted pointer containers.
// NOTE(review): the operator() body is missing from this extraction —
// presumably `return lhs < rhs;`; verify against the upstream header.
3263 struct VmaPointerLess
3265 bool operator()(
const void* lhs,
const void* rhs)
const 3271 class VmaDefragmentator;
// A growable sequence of VmaDeviceMemoryBlocks for one memory type /
// block-vector type; implements allocate/free, defragmentation hooks and
// lost-allocation handling for default pools and custom VmaPools alike.
3279 struct VmaBlockVector
3282 VmaAllocator hAllocator,
3283 uint32_t memoryTypeIndex,
3284 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3285 VkDeviceSize preferredBlockSize,
3286 size_t minBlockCount,
3287 size_t maxBlockCount,
3288 VkDeviceSize bufferImageGranularity,
3289 uint32_t frameInUseCount,
// Pre-creates minBlockCount empty blocks (used by custom pools).
3293 VkResult CreateMinBlocks();
3295 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3296 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3297 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3298 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3299 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3303 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Allocate: tries existing blocks, then creates a new block up to
// m_MaxBlockCount. NOTE(review): function name line dropped in extraction.
3306 VmaPool hCurrentPool,
3307 uint32_t currentFrameIndex,
3308 const VkMemoryRequirements& vkMemReq,
3310 VmaSuballocationType suballocType,
3311 VmaAllocation* pAllocation);
3314 VmaAllocation hAllocation);
3319 #if VMA_STATS_STRING_ENABLED 3320 void PrintDetailedMap(
class VmaJsonWriter& json);
// Used when the whole allocator is asked to unmap/remap persistently mapped
// memory (e.g. around device loss or memory pressure).
3323 void UnmapPersistentlyMappedMemory();
3324 VkResult MapPersistentlyMappedMemory();
3326 void MakePoolAllocationsLost(
3327 uint32_t currentFrameIndex,
3328 size_t* pLostAllocationCount);
// Lazily creates the per-vector defragmentator object.
3330 VmaDefragmentator* EnsureDefragmentator(
3331 VmaAllocator hAllocator,
3332 uint32_t currentFrameIndex);
3334 VkResult Defragment(
3336 VkDeviceSize& maxBytesToMove,
3337 uint32_t& maxAllocationsToMove);
3339 void DestroyDefragmentator();
3342 friend class VmaDefragmentator;
// ---- Immutable configuration ----
3344 const VmaAllocator m_hAllocator;
3345 const uint32_t m_MemoryTypeIndex;
3346 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3347 const VkDeviceSize m_PreferredBlockSize;
3348 const size_t m_MinBlockCount;
3349 const size_t m_MaxBlockCount;
3350 const VkDeviceSize m_BufferImageGranularity;
3351 const uint32_t m_FrameInUseCount;
3352 const bool m_IsCustomPool;
// Blocks are incrementally kept sorted so emptier blocks are preferred.
3355 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// At most one fully-empty block is kept alive as a reuse cache.
3359 bool m_HasEmptyBlock;
3360 VmaDefragmentator* m_pDefragmentator;
3363 void Remove(VmaDeviceMemoryBlock* pBlock);
3367 void IncrementallySortBlocks();
3369 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// NOTE(review): interior of a custom-pool type (presumably VmaPool_T) whose
// header line is missing from this extraction: a pool is a thin wrapper
// around one dedicated VmaBlockVector.
3375 VmaBlockVector m_BlockVector;
3379 VmaAllocator hAllocator,
3383 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves allocations between blocks of one VmaBlockVector to compact memory,
// within caller-supplied byte/allocation budgets.
3385 #if VMA_STATS_STRING_ENABLED 3390 class VmaDefragmentator
3392 const VmaAllocator m_hAllocator;
3393 VmaBlockVector*
const m_pBlockVector;
3394 uint32_t m_CurrentFrameIndex;
3395 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
// Running totals reported back through GetBytesMoved/GetAllocationsMoved.
3396 VkDeviceSize m_BytesMoved;
3397 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation, plus the caller's optional
// "was it moved?" output flag.
3399 struct AllocationInfo
3401 VmaAllocation m_hAllocation;
3402 VkBool32* m_pChanged;
3405 m_hAllocation(VK_NULL_HANDLE),
3406 m_pChanged(VMA_NULL)
// Orders candidate allocations largest-first so big moves happen early.
3411 struct AllocationInfoSizeGreater
3413 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3415 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3420 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during a defragmentation pass.
// NOTE(review): the BlockInfo struct header line appears dropped here.
3424 VmaDeviceMemoryBlock* m_pBlock;
3425 bool m_HasNonMovableAllocations;
3426 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3428 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3430 m_HasNonMovableAllocations(true),
3431 m_Allocations(pAllocationCallbacks),
3432 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when it contains allocations that were
// not registered for defragmentation.
3436 void CalcHasNonMovableAllocations()
3438 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3439 const size_t defragmentAllocCount = m_Allocations.size();
3440 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (Name has an upstream typo: "Descecnding".)
3443 void SortAllocationsBySizeDescecnding()
3445 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
// Maps the block's memory on demand for the byte copy; Unmap undoes it.
3448 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3449 void Unmap(VmaAllocator hAllocator);
3453 void* m_pMappedDataForDefragmentation;
// Heterogeneous comparators so a sorted BlockInfo* vector can be searched by
// raw block pointer.
3456 struct BlockPointerLess
3458 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3460 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3462 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3464 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Preferred move destinations: blocks with only movable allocations first,
// then blocks with less free space (to fill them up).
3470 struct BlockInfoCompareMoveDestination
3472 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3474 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3478 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3482 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3490 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3491 BlockInfoVector m_Blocks;
// One pass of moving allocations until a budget is exhausted.
3493 VkResult DefragmentRound(
3494 VkDeviceSize maxBytesToMove,
3495 uint32_t maxAllocationsToMove);
// Heuristic: a move only makes sense toward an earlier block or offset.
3497 static bool MoveMakesSense(
3498 size_t dstBlockIndex, VkDeviceSize dstOffset,
3499 size_t srcBlockIndex, VkDeviceSize srcOffset);
3503 VmaAllocator hAllocator,
3504 VmaBlockVector* pBlockVector,
3505 uint32_t currentFrameIndex);
3507 ~VmaDefragmentator();
3509 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3510 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3512 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3514 VkResult Defragment(
3515 VkDeviceSize maxBytesToMove,
3516 uint32_t maxAllocationsToMove);
// The central allocator object behind the public VmaAllocator handle: owns
// per-memory-type block vectors, dedicated-allocation lists, custom pools,
// device properties and the frame counter.
3520 struct VmaAllocator_T
3523 bool m_UseKhrDedicatedAllocation;
3525 bool m_AllocationCallbacksSpecified;
3526 VkAllocationCallbacks m_AllocationCallbacks;
// Nonzero while persistently mapped memory is globally unmapped.
3530 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
// Optional per-heap size caps, guarded by their own mutex.
3533 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3534 VMA_MUTEX m_HeapSizeLimitMutex;
3536 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3537 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default block vectors, indexed by [memory type][mapped/unmapped].
3540 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
// Dedicated allocations, same indexing, each memory type with its own mutex.
3543 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3544 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3545 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks or null to mean "use the system allocator".
3550 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3552 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3556 return m_VulkanFunctions;
// Effective granularity = max(debug minimum, device limit).
3559 VkDeviceSize GetBufferImageGranularity()
const 3562 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3563 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3566 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3567 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3569 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3571 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3572 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Also reports whether VK_KHR_dedicated_allocation requires/prefers a
// dedicated allocation for the resource.
3575 void GetBufferMemoryRequirements(
3577 VkMemoryRequirements& memReq,
3578 bool& requiresDedicatedAllocation,
3579 bool& prefersDedicatedAllocation)
const;
3580 void GetImageMemoryRequirements(
3582 VkMemoryRequirements& memReq,
3583 bool& requiresDedicatedAllocation,
3584 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point used by vmaAllocateMemory*/vmaCreateBuffer.
3587 VkResult AllocateMemory(
3588 const VkMemoryRequirements& vkMemReq,
3589 bool requiresDedicatedAllocation,
3590 bool prefersDedicatedAllocation,
3591 VkBuffer dedicatedBuffer,
3592 VkImage dedicatedImage,
3594 VmaSuballocationType suballocType,
3595 VmaAllocation* pAllocation);
3598 void FreeMemory(
const VmaAllocation allocation);
3600 void CalculateStats(
VmaStats* pStats);
3602 #if VMA_STATS_STRING_ENABLED 3603 void PrintDetailedMap(
class VmaJsonWriter& json);
3606 void UnmapPersistentlyMappedMemory();
3607 VkResult MapPersistentlyMappedMemory();
3609 VkResult Defragment(
3610 VmaAllocation* pAllocations,
3611 size_t allocationCount,
3612 VkBool32* pAllocationsChanged,
3616 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3619 void DestroyPool(VmaPool pool);
3620 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3622 void SetCurrentFrameIndex(uint32_t frameIndex);
3624 void MakePoolAllocationsLost(
3626 size_t* pLostAllocationCount);
3628 void CreateLostAllocation(VmaAllocation* pAllocation);
// Thin wrappers over vkAllocateMemory/vkFreeMemory that also enforce heap
// size limits and invoke user device-memory callbacks.
3630 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3631 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3634 VkDeviceSize m_PreferredLargeHeapBlockSize;
3635 VkDeviceSize m_PreferredSmallHeapBlockSize;
3637 VkPhysicalDevice m_PhysicalDevice;
3638 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools created via vmaCreatePool, guarded by m_PoolsMutex.
3640 VMA_MUTEX m_PoolsMutex;
3642 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3648 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Tries block allocation first, falls back to dedicated when required or
// when block allocation fails.
3650 VkResult AllocateMemoryOfType(
3651 const VkMemoryRequirements& vkMemReq,
3652 bool dedicatedAllocation,
3653 VkBuffer dedicatedBuffer,
3654 VkImage dedicatedImage,
3656 uint32_t memTypeIndex,
3657 VmaSuballocationType suballocType,
3658 VmaAllocation* pAllocation);
3661 VkResult AllocateDedicatedMemory(
3663 VmaSuballocationType suballocType,
3664 uint32_t memTypeIndex,
3667 VkBuffer dedicatedBuffer,
3668 VkImage dedicatedImage,
3669 VmaAllocation* pAllocation);
3672 void FreeDedicatedMemory(VmaAllocation allocation);
// ---- CPU-side allocation helpers routed through the allocator's
// VkAllocationCallbacks (overloads of the pointer-based versions above). ----
3678 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3680 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3683 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3685 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (no construction — raw storage only).
3688 template<
typename T>
3689 static T* VmaAllocate(VmaAllocator hAllocator)
3691 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3694 template<
typename T>
3695 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3697 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroy-and-free counterparts.
// NOTE(review): the explicit destructor-call lines (ptr->~T()) appear dropped
// from this extraction — verify against the upstream header.
3700 template<
typename T>
3701 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3706 VmaFree(hAllocator, ptr);
3710 template<
typename T>
3711 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3715 for(
size_t i = count; i--; )
3717 VmaFree(hAllocator, ptr);
// Minimal append-only string builder used to produce the JSON stats string;
// backed by a VmaVector<char> using the allocator's CPU callbacks.
3724 #if VMA_STATS_STRING_ENABLED 3726 class VmaStringBuilder
3729 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3730 size_t GetLength()
const {
return m_Data.size(); }
// The buffer is NOT NUL-terminated; pair GetData() with GetLength().
3731 const char* GetData()
const {
return m_Data.data(); }
3733 void Add(
char ch) { m_Data.push_back(ch); }
3734 void Add(
const char* pStr);
3735 void AddNewLine() { Add(
'\n'); }
3736 void AddNumber(uint32_t num);
3737 void AddNumber(uint64_t num);
3738 void AddPointer(
const void* ptr);
3741 VmaVector< char, VmaStlAllocator<char> > m_Data;
3744 void VmaStringBuilder::Add(
const char* pStr)
3746 const size_t strLen = strlen(pStr);
3749 const size_t oldCount = m_Data.size();
3750 m_Data.resize(oldCount + strLen);
3751 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer formatting: each converts into a small stack buffer and
// appends it. NOTE(review): the buffer declarations and the Add(buf) calls
// are missing from this extraction — verify against the upstream header.
3755 void VmaStringBuilder::AddNumber(uint32_t num)
3758 VmaUint32ToStr(buf,
sizeof(buf), num);
3762 void VmaStringBuilder::AddNumber(uint64_t num)
3765 VmaUint64ToStr(buf,
sizeof(buf), num);
3769 void VmaStringBuilder::AddPointer(
const void* ptr)
3772 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over a VmaStringBuilder. Objects are written as
// alternating string-key / value pairs (valueCount parity enforces this);
// a stack of StackItems tracks nesting and per-collection formatting.
3776 #endif // #if VMA_STATS_STRING_ENABLED 3781 #if VMA_STATS_STRING_ENABLED 3786 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// Collections: singleLine suppresses newlines/indentation inside them.
3789 void BeginObject(
bool singleLine =
false);
3792 void BeginArray(
bool singleLine =
false);
// Strings can be emitted whole (WriteString) or streamed in pieces
// (BeginString / ContinueString / EndString).
3795 void WriteString(
const char* pStr);
3796 void BeginString(
const char* pStr = VMA_NULL);
3797 void ContinueString(
const char* pStr);
3798 void ContinueString(uint32_t n);
3799 void ContinueString(uint64_t n);
3800 void EndString(
const char* pStr = VMA_NULL);
3802 void WriteNumber(uint32_t n);
3803 void WriteNumber(uint64_t n);
3804 void WriteBool(
bool b);
3808 static const char*
const INDENT;
3810 enum COLLECTION_TYPE
3812 COLLECTION_TYPE_OBJECT,
3813 COLLECTION_TYPE_ARRAY,
// One entry per open collection.
3817 COLLECTION_TYPE type;
3818 uint32_t valueCount;
3819 bool singleLineMode;
3822 VmaStringBuilder& m_SB;
3823 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3824 bool m_InsideString;
// Emits separators/indentation before a value; isString distinguishes keys.
3826 void BeginValue(
bool isString);
3827 void WriteIndent(
bool oneLess =
false);
// Two-space indent per nesting level.
3830 const char*
const VmaJsonWriter::INDENT =
"  ";
// Constructor: the stack uses the caller's CPU allocation callbacks.
// NOTE(review): the m_SB initializer line appears dropped in this extraction.
3832 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3834 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3835 m_InsideString(false)
// Destructor: all strings and collections must have been closed.
3839 VmaJsonWriter::~VmaJsonWriter()
3841 VMA_ASSERT(!m_InsideString);
3842 VMA_ASSERT(m_Stack.empty());
// BeginObject/BeginArray push a StackItem; the '{'/'[' emission lines are
// missing from this extraction.
3845 void VmaJsonWriter::BeginObject(
bool singleLine)
3847 VMA_ASSERT(!m_InsideString);
3853 item.type = COLLECTION_TYPE_OBJECT;
3854 item.valueCount = 0;
3855 item.singleLineMode = singleLine;
3856 m_Stack.push_back(item);
// EndObject/EndArray pop after checking the top matches; an object must hold
// complete key/value pairs (even valueCount — assertion dropped here).
3859 void VmaJsonWriter::EndObject()
3861 VMA_ASSERT(!m_InsideString);
3866 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
3870 void VmaJsonWriter::BeginArray(
bool singleLine)
3872 VMA_ASSERT(!m_InsideString);
3878 item.type = COLLECTION_TYPE_ARRAY;
3879 item.valueCount = 0;
3880 item.singleLineMode = singleLine;
3881 m_Stack.push_back(item);
3884 void VmaJsonWriter::EndArray()
3886 VMA_ASSERT(!m_InsideString);
3891 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// WriteString = BeginString + EndString in one call (body lines dropped).
3895 void VmaJsonWriter::WriteString(
const char* pStr)
// BeginString opens a quoted string value; optional initial content.
3901 void VmaJsonWriter::BeginString(
const char* pStr)
3903 VMA_ASSERT(!m_InsideString);
3907 m_InsideString =
true;
3908 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3910 ContinueString(pStr);
// ContinueString escapes each character as needed; the per-character escape
// cases (quotes, backslash, control characters) are dropped from this
// extraction — only the unsupported-character assert remains visible.
3914 void VmaJsonWriter::ContinueString(
const char* pStr)
3916 VMA_ASSERT(m_InsideString);
3918 const size_t strLen = strlen(pStr);
3919 for(
size_t i = 0; i < strLen; ++i)
3946 VMA_ASSERT(0 &&
"Character not currently supported.");
3952 void VmaJsonWriter::ContinueString(uint32_t n)
3954 VMA_ASSERT(m_InsideString);
3958 void VmaJsonWriter::ContinueString(uint64_t n)
3960 VMA_ASSERT(m_InsideString);
// EndString optionally appends final content, then closes the quote.
3964 void VmaJsonWriter::EndString(
const char* pStr)
3966 VMA_ASSERT(m_InsideString);
3967 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3969 ContinueString(pStr);
3972 m_InsideString =
false;
// Unquoted scalar values; all assert we are not inside an open string.
3975 void VmaJsonWriter::WriteNumber(uint32_t n)
3977 VMA_ASSERT(!m_InsideString);
3982 void VmaJsonWriter::WriteNumber(uint64_t n)
3984 VMA_ASSERT(!m_InsideString);
3989 void VmaJsonWriter::WriteBool(
bool b)
3991 VMA_ASSERT(!m_InsideString);
3993 m_SB.Add(b ?
"true" :
"false");
3996 void VmaJsonWriter::WriteNull()
3998 VMA_ASSERT(!m_InsideString);
// BeginValue enforces JSON structure: inside an object, even positions must
// be string keys (separator emission lines are dropped from this extraction).
4003 void VmaJsonWriter::BeginValue(
bool isString)
4005 if(!m_Stack.empty())
4007 StackItem& currItem = m_Stack.back();
4008 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4009 currItem.valueCount % 2 == 0)
4011 VMA_ASSERT(isString);
4014 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4015 currItem.valueCount % 2 != 0)
4019 else if(currItem.valueCount > 0)
4028 ++currItem.valueCount;
// Emits a newline plus one INDENT per open non-single-line collection;
// oneLess is used when closing a collection.
4032 void VmaJsonWriter::WriteIndent(
bool oneLess)
4034 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4038 size_t count = m_Stack.size();
4039 if(count > 0 && oneLess)
4043 for(
size_t i = 0; i < count; ++i)
// ---- Out-of-class VmaAllocation_T accessors: each dispatches on m_Type.
// NOTE(review): the switch(m_Type) header lines and some default branches are
// dropped from this extraction; visible cases are kept verbatim. ----
4050 #endif // #if VMA_STATS_STRING_ENABLED 4054 VkDeviceSize VmaAllocation_T::GetOffset()
const 4058 case ALLOCATION_TYPE_BLOCK:
4059 return m_BlockAllocation.m_Offset;
// Dedicated allocations start at offset 0 of their own VkDeviceMemory
// (return line dropped here).
4060 case ALLOCATION_TYPE_DEDICATED:
4068 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4072 case ALLOCATION_TYPE_BLOCK:
4073 return m_BlockAllocation.m_Block->m_hMemory;
4074 case ALLOCATION_TYPE_DEDICATED:
4075 return m_DedicatedAllocation.m_hMemory;
4078 return VK_NULL_HANDLE;
4082 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4086 case ALLOCATION_TYPE_BLOCK:
4087 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4088 case ALLOCATION_TYPE_DEDICATED:
4089 return m_DedicatedAllocation.m_MemoryTypeIndex;
4096 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 4100 case ALLOCATION_TYPE_BLOCK:
4101 return m_BlockAllocation.m_Block->m_BlockVectorType;
4102 case ALLOCATION_TYPE_DEDICATED:
4103 return (m_DedicatedAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
4106 return VMA_BLOCK_VECTOR_TYPE_COUNT;
// For block allocations, mapped data is the block's mapping plus this
// allocation's offset; null when the block is not persistently mapped.
4110 void* VmaAllocation_T::GetMappedData()
const 4114 case ALLOCATION_TYPE_BLOCK:
4115 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
4117 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
4124 case ALLOCATION_TYPE_DEDICATED:
4125 return m_DedicatedAllocation.m_pMappedData;
4132 bool VmaAllocation_T::CanBecomeLost()
const 4136 case ALLOCATION_TYPE_BLOCK:
4137 return m_BlockAllocation.m_CanBecomeLost;
// Dedicated allocations can never become lost (return line dropped here).
4138 case ALLOCATION_TYPE_DEDICATED:
// Pool handle only exists for block allocations.
4146 VmaPool VmaAllocation_T::GetPool()
const 4148 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4149 return m_BlockAllocation.m_hPool;
// Re-maps a dedicated allocation that was configured as persistently mapped
// (no-op otherwise). Uses the allocator's dispatched vkMapMemory.
4152 VkResult VmaAllocation_T::DedicatedAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator)
4154 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4155 if(m_DedicatedAllocation.m_PersistentMap)
4157 return (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4158 hAllocator->m_hDevice,
4159 m_DedicatedAllocation.m_hMemory,
4163 &m_DedicatedAllocation.m_pMappedData);
// Unmaps the dedicated allocation if currently mapped and clears the pointer.
4167 void VmaAllocation_T::DedicatedAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator)
4169 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4170 if(m_DedicatedAllocation.m_pMappedData)
4172 VMA_ASSERT(m_DedicatedAllocation.m_PersistentMap);
4173 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_DedicatedAllocation.m_hMemory);
4174 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
// MakeLost: atomically transitions the allocation to the lost state unless it
// is already lost or was used within the last frameInUseCount frames.
// Retries on CAS failure (loop structure partly dropped from this extraction).
4179 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4181 VMA_ASSERT(CanBecomeLost());
4187 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4190 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4195 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4201 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names for VmaSuballocationType values (initializers dropped
// from this extraction), and JSON serialization of one VmaStatInfo.
4211 #if VMA_STATS_STRING_ENABLED 4214 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Writes counts, byte totals and min/avg/max size sub-objects for the given
// stats record (the WriteNumber calls between keys are dropped here).
4223 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4227 json.WriteString(
"Blocks");
4230 json.WriteString(
"Allocations");
4233 json.WriteString(
"UnusedRanges");
4236 json.WriteString(
"UsedBytes");
4239 json.WriteString(
"UnusedBytes");
4244 json.WriteString(
"AllocationSize");
4245 json.BeginObject(
true);
4246 json.WriteString(
"Min");
4248 json.WriteString(
"Avg");
4250 json.WriteString(
"Max");
4257 json.WriteString(
"UnusedRangeSize");
4258 json.BeginObject(
true);
4259 json.WriteString(
"Min");
4261 json.WriteString(
"Avg");
4263 json.WriteString(
"Max");
// Comparator for the size-sorted free-suballocation index: orders list
// iterators by the size of the suballocation they point to; the second
// overload enables binary search against a raw size.
4271 #endif // #if VMA_STATS_STRING_ENABLED 4273 struct VmaSuballocationItemSizeLess
4276 const VmaSuballocationList::iterator lhs,
4277 const VmaSuballocationList::iterator rhs)
const 4279 return lhs->size < rhs->size;
4282 const VmaSuballocationList::iterator lhs,
4283 VkDeviceSize rhsSize)
const 4285 return lhs->size < rhsSize;
// Constructor: containers use the allocator's CPU callbacks; numeric members
// are initialized here (their initializer lines are dropped in this extraction).
4292 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4296 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4297 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4301 VmaBlockMetadata::~VmaBlockMetadata()
// Init: the whole block becomes a single FREE suballocation, registered in
// the size-sorted index.
4305 void VmaBlockMetadata::Init(VkDeviceSize size)
4309 m_SumFreeSize = size;
4311 VmaSuballocation suballoc = {};
4312 suballoc.offset = 0;
4313 suballoc.size = size;
4314 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4315 suballoc.hAllocation = VK_NULL_HANDLE;
4317 m_Suballocations.push_back(suballoc);
// NOTE(review): a decrement of the end iterator (to point at the pushed
// element) appears dropped between these lines.
4318 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4320 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full structural validation: walks the suballocation list recomputing
// offsets, free counts and free bytes, checks that no two free ranges are
// adjacent (they should have been merged), and verifies the size-sorted
// free index matches. Returns false (via dropped early-return lines) on the
// first inconsistency.
4323 bool VmaBlockMetadata::Validate()
const 4325 if(m_Suballocations.empty())
4331 VkDeviceSize calculatedOffset = 0;
4333 uint32_t calculatedFreeCount = 0;
4335 VkDeviceSize calculatedSumFreeSize = 0;
// Number of free ranges large enough to appear in m_FreeSuballocationsBySize.
4338 size_t freeSuballocationsToRegister = 0;
4340 bool prevFree =
false;
4342 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4343 suballocItem != m_Suballocations.cend();
4346 const VmaSuballocation& subAlloc = *suballocItem;
// Each suballocation must start exactly where the previous one ended.
4349 if(subAlloc.offset != calculatedOffset)
4354 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges indicate a missed merge.
4356 if(prevFree && currFree)
4360 prevFree = currFree;
// Free <=> no allocation handle; used <=> handle present.
4362 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4369 calculatedSumFreeSize += subAlloc.size;
4370 ++calculatedFreeCount;
4371 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4373 ++freeSuballocationsToRegister;
4377 calculatedOffset += subAlloc.size;
4382 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The index must contain only free items, sorted by size ascending.
4387 VkDeviceSize lastSize = 0;
4388 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4390 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4393 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4398 if(suballocItem->size < lastSize)
4403 lastSize = suballocItem->size;
// Final verdict: free list valid and all recomputed totals match the caches.
4408 ValidateFreeSuballocationList() &&
4409 (calculatedOffset == m_Size) &&
4410 (calculatedSumFreeSize == m_SumFreeSize) &&
4411 (calculatedFreeCount == m_FreeCount);
// Largest free range = last element of the size-ascending index
// (returns 0 when there are no registered free ranges — line dropped).
4414 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4416 if(!m_FreeSuballocationsBySize.empty())
4418 return m_FreeSuballocationsBySize.back()->size;
// Empty = exactly one suballocation and it is free.
4426 bool VmaBlockMetadata::IsEmpty()
const 4428 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo from this block's suballocations (accumulation lines
// for sizes/min/max are dropped from this extraction).
4431 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4435 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4447 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4448 suballocItem != m_Suballocations.cend();
4451 const VmaSuballocation& suballoc = *suballocItem;
4452 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into a VmaPoolStats.
4465 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4467 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4469 inoutStats.
size += m_Size;
// Serializes this block's layout to JSON: totals followed by an array of
// {Type, Size, Offset} entries, one per suballocation.
4476 #if VMA_STATS_STRING_ENABLED 4478 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4482 json.WriteString(
"TotalBytes");
4483 json.WriteNumber(m_Size);
4485 json.WriteString(
"UnusedBytes");
4486 json.WriteNumber(m_SumFreeSize);
4488 json.WriteString(
"Allocations");
4489 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4491 json.WriteString(
"UnusedRanges");
4492 json.WriteNumber(m_FreeCount);
4494 json.WriteString(
"Suballocations");
// One single-line JSON object per suballocation, in address order.
4497 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4498 suballocItem != m_Suballocations.cend();
4499 ++suballocItem, ++i)
4501 json.BeginObject(
true);
4503 json.WriteString(
"Type");
4504 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4506 json.WriteString(
"Size");
4507 json.WriteNumber(suballocItem->size);
4509 json.WriteString(
"Offset");
4510 json.WriteNumber(suballocItem->offset);
4519 #endif // #if VMA_STATS_STRING_ENABLED 4531 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4533 VMA_ASSERT(IsEmpty());
4534 pAllocationRequest->offset = 0;
4535 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4536 pAllocationRequest->sumItemSize = 0;
4537 pAllocationRequest->item = m_Suballocations.begin();
4538 pAllocationRequest->itemsToMakeLostCount = 0;
// Finds where a new allocation of allocSize/allocAlignment can be placed.
// Strategy: (1) best-fit via binary search in the size-sorted free index
// (or worst-fit scan in descending order — selection between the two
// strategies is partly dropped from this extraction); (2) when
// canMakeOtherLost, brute-force over all suballocations choosing the
// cheapest plan that sacrifices lost-able allocations. Returns true with
// *pAllocationRequest filled on success.
4541 bool VmaBlockMetadata::CreateAllocationRequest(
4542 uint32_t currentFrameIndex,
4543 uint32_t frameInUseCount,
4544 VkDeviceSize bufferImageGranularity,
4545 VkDeviceSize allocSize,
4546 VkDeviceSize allocAlignment,
4547 VmaSuballocationType allocType,
4548 bool canMakeOtherLost,
4549 VmaAllocationRequest* pAllocationRequest)
4551 VMA_ASSERT(allocSize > 0);
4552 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4553 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4554 VMA_HEAVY_ASSERT(Validate());
// Early out: without making others lost, total free bytes must suffice.
4557 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4563 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4564 if(freeSuballocCount > 0)
// Best-fit: first free range not smaller than allocSize, then walk upward
// until CheckAllocation accepts one (alignment/granularity may reject).
4569 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4570 m_FreeSuballocationsBySize.data(),
4571 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4573 VmaSuballocationItemSizeLess());
4574 size_t index = it - m_FreeSuballocationsBySize.data();
4575 for(; index < freeSuballocCount; ++index)
4580 bufferImageGranularity,
4584 m_FreeSuballocationsBySize[index],
4586 &pAllocationRequest->offset,
4587 &pAllocationRequest->itemsToMakeLostCount,
4588 &pAllocationRequest->sumFreeSize,
4589 &pAllocationRequest->sumItemSize))
4591 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit alternative: iterate free ranges from largest to smallest.
4599 for(
size_t index = freeSuballocCount; index--; )
4604 bufferImageGranularity,
4608 m_FreeSuballocationsBySize[index],
4610 &pAllocationRequest->offset,
4611 &pAllocationRequest->itemsToMakeLostCount,
4612 &pAllocationRequest->sumFreeSize,
4613 &pAllocationRequest->sumItemSize))
4615 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Lost-allocation path: consider starting at every free or lost-able
// suballocation and keep the plan with the lowest CalcCost().
4622 if(canMakeOtherLost)
// Sentinel "worst possible" cost so any real plan wins the comparison.
4626 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4627 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4629 VmaAllocationRequest tmpAllocRequest = {};
4630 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4631 suballocIt != m_Suballocations.end();
4634 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4635 suballocIt->hAllocation->CanBecomeLost())
4640 bufferImageGranularity,
4646 &tmpAllocRequest.offset,
4647 &tmpAllocRequest.itemsToMakeLostCount,
4648 &tmpAllocRequest.sumFreeSize,
4649 &tmpAllocRequest.sumItemSize))
4651 tmpAllocRequest.item = suballocIt;
4653 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4655 *pAllocationRequest = tmpAllocRequest;
// Success iff some plan replaced the sentinel cost.
4661 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// NOTE(review): braces and failure-path lines are missing from this span;
// annotating only, code text left byte-identical.
// Walks forward from pAllocationRequest->item making allocations lost until
// itemsToMakeLostCount reaches zero; free items are skipped. On success the
// request's item points at a (merged) free suballocation.
4670 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
4671 uint32_t currentFrameIndex,
4672 uint32_t frameInUseCount,
4673 VmaAllocationRequest* pAllocationRequest)
4675 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over items that are already free.
4677 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4679 ++pAllocationRequest->item;
4681 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4682 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4683 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4684 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge adjacent free ranges; continue from its result.
4686 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4687 --pAllocationRequest->itemsToMakeLostCount;
4695 VMA_HEAVY_ASSERT(Validate());
4696 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4697 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
4702 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4704 uint32_t lostAllocationCount = 0;
4705 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4706 it != m_Suballocations.end();
4709 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4710 it->hAllocation->CanBecomeLost() &&
4711 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4713 it = FreeSuballocation(it);
4714 ++lostAllocationCount;
4717 return lostAllocationCount;
// NOTE(review): the padding-branch conditions and free-count adjustment lines
// are missing from this span; annotating only, code text left byte-identical.
// Commits a previously computed allocation request: converts the chosen free
// suballocation into an occupied one, inserting new FREE suballocations for
// any leftover space before/after the allocation.
4720 void VmaBlockMetadata::Alloc(
4721 const VmaAllocationRequest& request,
4722 VmaSuballocationType type,
4723 VkDeviceSize allocSize,
4724 VmaAllocation hAllocation)
4726 VMA_ASSERT(request.item != m_Suballocations.end());
4727 VmaSuballocation& suballoc = *request.item;
4729 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4731 VMA_ASSERT(request.offset >= suballoc.offset);
// Alignment/margin gap between the start of the free range and the allocation.
4732 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4733 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
// Leftover space after the allocation inside the same free range.
4734 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item leaves the free list before being repurposed as occupied.
4738 UnregisterFreeSuballocation(request.item);
4740 suballoc.offset = request.offset;
4741 suballoc.size = allocSize;
4742 suballoc.type = type;
4743 suballoc.hAllocation = hAllocation;
// Trailing padding becomes its own FREE suballocation inserted after the item.
4748 VmaSuballocation paddingSuballoc = {};
4749 paddingSuballoc.offset = request.offset + allocSize;
4750 paddingSuballoc.size = paddingEnd;
4751 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4752 VmaSuballocationList::iterator next = request.item;
4754 const VmaSuballocationList::iterator paddingEndItem =
4755 m_Suballocations.insert(next, paddingSuballoc);
4756 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a FREE suballocation inserted before the item.
4762 VmaSuballocation paddingSuballoc = {};
4763 paddingSuballoc.offset = request.offset - paddingBegin;
4764 paddingSuballoc.size = paddingBegin;
4765 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4766 const VmaSuballocationList::iterator paddingBeginItem =
4767 m_Suballocations.insert(request.item, paddingSuballoc);
4768 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; increments for padding ranges presumably lost in
// the damaged span — TODO confirm against upstream.
4772 m_FreeCount = m_FreeCount - 1;
4773 if(paddingBegin > 0)
4781 m_SumFreeSize -= allocSize;
4784 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
4786 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4787 suballocItem != m_Suballocations.end();
4790 VmaSuballocation& suballoc = *suballocItem;
4791 if(suballoc.hAllocation == allocation)
4793 FreeSuballocation(suballocItem);
4794 VMA_HEAVY_ASSERT(Validate());
4798 VMA_ASSERT(0 &&
"Not found!");
4801 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 4803 VkDeviceSize lastSize = 0;
4804 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
4806 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
4808 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
4813 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4818 if(it->size < lastSize)
4824 lastSize = it->size;
// NOTE(review): this span has lost braces, early-return lines and loop
// increments in transit; annotating only, code text left byte-identical.
// Tests whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at `suballocItem`, honoring VMA_DEBUG_MARGIN and
// bufferImageGranularity. With canMakeOtherLost it may span several
// suballocations, counting the ones that would have to become lost in
// *itemsToMakeLostCount and summing their sizes in *pSumItemSize.
4829 bool VmaBlockMetadata::CheckAllocation(
4830 uint32_t currentFrameIndex,
4831 uint32_t frameInUseCount,
4832 VkDeviceSize bufferImageGranularity,
4833 VkDeviceSize allocSize,
4834 VkDeviceSize allocAlignment,
4835 VmaSuballocationType allocType,
4836 VmaSuballocationList::const_iterator suballocItem,
4837 bool canMakeOtherLost,
4838 VkDeviceSize* pOffset,
4839 size_t* itemsToMakeLostCount,
4840 VkDeviceSize* pSumFreeSize,
4841 VkDeviceSize* pSumItemSize)
const 4843 VMA_ASSERT(allocSize > 0);
4844 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4845 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4846 VMA_ASSERT(pOffset != VMA_NULL);
4848 *itemsToMakeLostCount = 0;
// ---- Path 1: placement may consume (make lost) occupied suballocations. ----
4852 if(canMakeOtherLost)
4854 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4856 *pSumFreeSize = suballocItem->size;
// Occupied start item: only usable if its allocation can become lost and is
// older than the in-use window.
4860 if(suballocItem->hAllocation->CanBecomeLost() &&
4861 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4863 ++*itemsToMakeLostCount;
4864 *pSumItemSize = suballocItem->size;
4873 if(m_Size - suballocItem->offset < allocSize)
// Start at the item's offset, push past the debug margin, then align up.
4879 *pOffset = suballocItem->offset;
4882 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4884 *pOffset += VMA_DEBUG_MARGIN;
4888 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4889 *pOffset = VmaAlignUp(*pOffset, alignment);
// If a previous suballocation on the same granularity page conflicts in type,
// bump the offset up to the next granularity boundary.
4893 if(bufferImageGranularity > 1)
4895 bool bufferImageGranularityConflict =
false;
4896 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4897 while(prevSuballocItem != m_Suballocations.cbegin())
4900 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4901 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4903 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4905 bufferImageGranularityConflict =
true;
4913 if(bufferImageGranularityConflict)
4915 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Aligned offset must still fall inside the starting suballocation.
4921 if(*pOffset >= suballocItem->offset + suballocItem->size)
4927 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
4930 VmaSuballocationList::const_iterator next = suballocItem;
4932 const VkDeviceSize requiredEndMargin =
4933 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4935 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4937 if(suballocItem->offset + totalSize > m_Size)
// If the request overflows this item, walk successors accumulating their
// sizes (free or lost-capable) until enough space is covered.
4944 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4945 if(totalSize > suballocItem->size)
4947 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4948 while(remainingSize > 0)
4951 if(lastSuballocItem == m_Suballocations.cend())
4955 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4957 *pSumFreeSize += lastSuballocItem->size;
4961 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4962 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4963 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4965 ++*itemsToMakeLostCount;
4966 *pSumItemSize += lastSuballocItem->size;
4973 remainingSize = (lastSuballocItem->size < remainingSize) ?
4974 remainingSize - lastSuballocItem->size : 0;
// Granularity check against the following suballocations: a conflicting
// neighbor on the same page must itself be lost-capable and old enough.
4980 if(bufferImageGranularity > 1)
4982 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4984 while(nextSuballocItem != m_Suballocations.cend())
4986 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4987 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4989 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4991 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4992 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4993 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4995 ++*itemsToMakeLostCount;
// ---- Path 2: plain placement inside a single FREE suballocation. ----
5014 const VmaSuballocation& suballoc = *suballocItem;
5015 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5017 *pSumFreeSize = suballoc.size;
5020 if(suballoc.size < allocSize)
5026 *pOffset = suballoc.offset;
5029 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5031 *pOffset += VMA_DEBUG_MARGIN;
5035 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5036 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity-conflict scan as in path 1.
5040 if(bufferImageGranularity > 1)
5042 bool bufferImageGranularityConflict =
false;
5043 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5044 while(prevSuballocItem != m_Suballocations.cbegin())
5047 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5048 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5050 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5052 bufferImageGranularityConflict =
true;
5060 if(bufferImageGranularityConflict)
5062 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5067 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5070 VmaSuballocationList::const_iterator next = suballocItem;
5072 const VkDeviceSize requiredEndMargin =
5073 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin exceed this free range.
5076 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: any conflicting neighbor on the same page rejects
// this placement outright (no lost-allocation escape hatch on this path).
5083 if(bufferImageGranularity > 1)
5085 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5087 while(nextSuballocItem != m_Suballocations.cend())
5089 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5090 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5092 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5111 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5113 VMA_ASSERT(item != m_Suballocations.end());
5114 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5116 VmaSuballocationList::iterator nextItem = item;
5118 VMA_ASSERT(nextItem != m_Suballocations.end());
5119 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5121 item->size += nextItem->size;
5123 m_Suballocations.erase(nextItem);
// NOTE(review): braces, iterator-increment/decrement lines and the merge
// branch structure are missing from this span; annotating only, code text
// left byte-identical.
// Marks *suballocItem as FREE, updates the free totals, merges it with an
// adjacent free neighbor on either side, and returns an iterator to the
// resulting (possibly merged) free suballocation.
5126 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5129 VmaSuballocation& suballoc = *suballocItem;
5130 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5131 suballoc.hAllocation = VK_NULL_HANDLE;
5135 m_SumFreeSize += suballoc.size;
// Decide whether the previous/next neighbors are free and thus mergeable.
5138 bool mergeWithNext =
false;
5139 bool mergeWithPrev =
false;
5141 VmaSuballocationList::iterator nextItem = suballocItem;
5143 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5145 mergeWithNext =
true;
5148 VmaSuballocationList::iterator prevItem = suballocItem;
5149 if(suballocItem != m_Suballocations.begin())
5152 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5154 mergeWithPrev =
true;
// Neighbors leave the size-sorted registry before merging changes their size.
5160 UnregisterFreeSuballocation(nextItem);
5161 MergeFreeWithNext(suballocItem);
// When merging with the predecessor, the predecessor survives and is
// re-registered with its new, larger size.
5166 UnregisterFreeSuballocation(prevItem);
5167 MergeFreeWithNext(prevItem);
5168 RegisterFreeSuballocation(prevItem);
5173 RegisterFreeSuballocation(suballocItem);
5174 return suballocItem;
5178 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5180 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5181 VMA_ASSERT(item->size > 0);
5185 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5187 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5189 if(m_FreeSuballocationsBySize.empty())
5191 m_FreeSuballocationsBySize.push_back(item);
5195 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// NOTE(review): braces, a binary-find argument and the early-return line are
// missing from this span; annotating only, code text left byte-identical.
// Removes `item` from the size-sorted m_FreeSuballocationsBySize vector.
// Items below the registration threshold were never added, so they are
// silently skipped; a registered item that cannot be found asserts.
5203 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5205 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5206 VMA_ASSERT(item->size > 0);
5210 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5212 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Binary-search to the first entry of equal size, then scan the run of
// equal-sized entries linearly for the exact iterator.
5214 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5215 m_FreeSuballocationsBySize.data(),
5216 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5218 VmaSuballocationItemSizeLess());
5219 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5220 index < m_FreeSuballocationsBySize.size();
5223 if(m_FreeSuballocationsBySize[index] == item)
5225 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run, otherwise the item was never registered.
5228 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5230 VMA_ASSERT(0 &&
"Not found.");
// Constructs an uninitialized block: sentinel type/index values and no device
// memory. Real state is established later by Init().
5239 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5240 m_MemoryTypeIndex(UINT32_MAX),
5241 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
5242 m_hMemory(VK_NULL_HANDLE),
5243 m_PersistentMap(false),
5244 m_pMappedData(VMA_NULL),
5245 m_Metadata(hAllocator)
// NOTE(review): trailing parameters (persistentMap, pMappedData — used in the
// body) are missing from the visible signature; annotating only, code text
// left byte-identical.
// Binds freshly allocated VkDeviceMemory to this block and initializes the
// metadata to one big free range of newSize bytes. Must be called exactly
// once on an empty block.
5249 void VmaDeviceMemoryBlock::Init(
5250 uint32_t newMemoryTypeIndex,
5251 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
5252 VkDeviceMemory newMemory,
5253 VkDeviceSize newSize,
5257 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5259 m_MemoryTypeIndex = newMemoryTypeIndex;
5260 m_BlockVectorType = newBlockVectorType;
5261 m_hMemory = newMemory;
5262 m_PersistentMap = persistentMap;
5263 m_pMappedData = pMappedData;
5265 m_Metadata.Init(newSize);
5268 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5272 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5274 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5275 if(m_pMappedData != VMA_NULL)
5277 (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
5278 m_pMappedData = VMA_NULL;
5281 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5282 m_hMemory = VK_NULL_HANDLE;
5285 bool VmaDeviceMemoryBlock::Validate()
const 5287 if((m_hMemory == VK_NULL_HANDLE) ||
5288 (m_Metadata.GetSize() == 0))
5293 return m_Metadata.Validate();
// NOTE(review): both surrounding function signatures/bodies are partially
// lost in this span; annotating only, code text left byte-identical.
// Zero-fills a VmaStatInfo before accumulation.
5298 memset(&outInfo, 0,
sizeof(outInfo));
// Finalizes averaged fields of a VmaStatInfo after accumulation (body lost).
5317 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// NOTE(review): most of this constructor's initializer list (the m_BlockVector
// construction it forwards createInfo into) is lost in this span; annotating
// only, code text left byte-identical.
// Builds a custom pool: forwards the pool create-info (memory type, mapped vs
// unmapped vector type, block size and min/max block counts, frame-in-use
// count) into the pool's block vector.
5325 VmaPool_T::VmaPool_T(
5326 VmaAllocator hAllocator,
5330 createInfo.memoryTypeIndex,
5332 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5333 createInfo.blockSize,
5334 createInfo.minBlockCount,
5335 createInfo.maxBlockCount,
5337 createInfo.frameInUseCount,
5342 VmaPool_T::~VmaPool_T()
// NOTE(review): preprocessor remnants and the pool destructor body are fused
// into this span; annotating only, code text left byte-identical.
// VmaBlockVector: a sequence of VmaDeviceMemoryBlock of one memory type,
// either the allocator's default vector or a custom pool's vector
// (isCustomPool). All state is captured by value in the initializer list.
5346 #if VMA_STATS_STRING_ENABLED 5348 #endif // #if VMA_STATS_STRING_ENABLED 5350 VmaBlockVector::VmaBlockVector(
5351 VmaAllocator hAllocator,
5352 uint32_t memoryTypeIndex,
5353 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5354 VkDeviceSize preferredBlockSize,
5355 size_t minBlockCount,
5356 size_t maxBlockCount,
5357 VkDeviceSize bufferImageGranularity,
5358 uint32_t frameInUseCount,
5359 bool isCustomPool) :
5360 m_hAllocator(hAllocator),
5361 m_MemoryTypeIndex(memoryTypeIndex),
5362 m_BlockVectorType(blockVectorType),
5363 m_PreferredBlockSize(preferredBlockSize),
5364 m_MinBlockCount(minBlockCount),
5365 m_MaxBlockCount(maxBlockCount),
5366 m_BufferImageGranularity(bufferImageGranularity),
5367 m_FrameInUseCount(frameInUseCount),
5368 m_IsCustomPool(isCustomPool),
// Block vector uses the allocator's own allocation callbacks.
5369 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5370 m_HasEmptyBlock(false),
// Defragmentator is created lazily by EnsureDefragmentator().
5371 m_pDefragmentator(VMA_NULL)
5375 VmaBlockVector::~VmaBlockVector()
5377 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5379 for(
size_t i = m_Blocks.size(); i--; )
5381 m_Blocks[i]->Destroy(m_hAllocator);
5382 vma_delete(m_hAllocator, m_Blocks[i]);
5386 VkResult VmaBlockVector::CreateMinBlocks()
5388 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5390 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5391 if(res != VK_SUCCESS)
// NOTE(review): the lines that reset *pStats before accumulation appear lost
// in this span; annotating only, code text left byte-identical.
// Accumulates pool statistics over all blocks, under the vector's mutex.
5399 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5407 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5409 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5411 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5413 VMA_HEAVY_ASSERT(pBlock->Validate());
5414 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on make-lost retry rounds in the canMakeOtherLost path below.
5418 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// NOTE(review): braces, several call-argument lines and flag checks are lost
// in this span; annotating only, code text left byte-identical.
// Allocates from this vector in three stages: (1) try each existing block,
// (2) create a new block (halving the size on failure for default pools),
// (3) with canMakeOtherLost, repeatedly pick the cheapest candidate request
// and make its victim allocations lost, up to VMA_ALLOCATION_TRY_COUNT times.
5420 VkResult VmaBlockVector::Allocate(
5421 VmaPool hCurrentPool,
5422 uint32_t currentFrameIndex,
5423 const VkMemoryRequirements& vkMemReq,
5425 VmaSuballocationType suballocType,
5426 VmaAllocation* pAllocation)
// Persistent-map flag of the request must match the pool's mapping mode.
5429 if(createInfo.
pool != VK_NULL_HANDLE &&
5432 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5433 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5436 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Stage 1: try to place the request in an existing block.
5440 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5442 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5443 VMA_ASSERT(pCurrBlock);
5444 VmaAllocationRequest currRequest = {};
5445 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5448 m_BufferImageGranularity,
5456 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Using an empty block means we no longer have one spare.
5459 if(pCurrBlock->m_Metadata.IsEmpty())
5461 m_HasEmptyBlock =
false;
5464 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5465 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5466 (*pAllocation)->InitBlockAllocation(
5475 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5476 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// Stage 2: create a brand-new block if the cap allows it.
5481 const bool canCreateNewBlock =
5483 (m_Blocks.size() < m_MaxBlockCount);
5486 if(canCreateNewBlock)
5489 VkDeviceSize blockSize = m_PreferredBlockSize;
5490 size_t newBlockIndex = 0;
5491 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Default pools retry with progressively smaller blocks (halving lines lost
// in this span — TODO confirm against upstream) as long as the request fits.
5494 if(res < 0 && m_IsCustomPool ==
false)
5498 if(blockSize >= vkMemReq.size)
5500 res = CreateBlock(blockSize, &newBlockIndex);
5505 if(blockSize >= vkMemReq.size)
5507 res = CreateBlock(blockSize, &newBlockIndex);
5512 if(res == VK_SUCCESS)
5514 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5515 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
// Fresh block: the whole-block request always succeeds.
5518 VmaAllocationRequest allocRequest;
5519 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
5520 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5521 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5522 (*pAllocation)->InitBlockAllocation(
5525 allocRequest.offset,
5531 VMA_HEAVY_ASSERT(pBlock->Validate());
5532 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// Stage 3: sacrifice lost-capable allocations to make room.
5541 if(canMakeOtherLost)
5543 uint32_t tryIndex = 0;
5544 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5546 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5547 VmaAllocationRequest bestRequest = {};
5548 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Evaluate every block; keep the candidate with the lowest CalcCost().
5552 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5554 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5555 VMA_ASSERT(pCurrBlock);
5556 VmaAllocationRequest currRequest = {};
5557 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5560 m_BufferImageGranularity,
5567 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5568 if(pBestRequestBlock == VMA_NULL ||
5569 currRequestCost < bestRequestCost)
5571 pBestRequestBlock = pCurrBlock;
5572 bestRequest = currRequest;
5573 bestRequestCost = currRequestCost;
// Cost 0 means nothing has to be lost — cannot do better, stop searching.
5575 if(bestRequestCost == 0)
5583 if(pBestRequestBlock != VMA_NULL)
5585 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
5591 if(pBestRequestBlock->m_Metadata.IsEmpty())
5593 m_HasEmptyBlock =
false;
5596 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5597 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5598 (*pAllocation)->InitBlockAllocation(
5607 VMA_HEAVY_ASSERT(pBlock->Validate());
5608 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// Retry budget exhausted: competing threads kept invalidating our requests.
5622 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5624 return VK_ERROR_TOO_MANY_OBJECTS;
5628 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// NOTE(review): braces and a few lines (scope block around the lock, empty-
// block bookkeeping) are lost in this span; annotating only, code text left
// byte-identical.
// Frees an allocation back to its block. Keeps at most one empty block alive
// as a spare (above m_MinBlockCount); a surplus empty block is destroyed
// outside the mutex via pBlockToDelete.
5631 void VmaBlockVector::Free(
5632 VmaAllocation hAllocation)
5634 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
5638 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5640 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5642 pBlock->m_Metadata.Free(hAllocation);
5643 VMA_HEAVY_ASSERT(pBlock->Validate());
5645 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block just became empty: schedule it for deletion if we already have a
// spare empty block, otherwise keep it as the spare.
5648 if(pBlock->m_Metadata.IsEmpty())
5651 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5653 pBlockToDelete = pBlock;
5659 m_HasEmptyBlock =
true;
// Block not empty, but a spare empty block exists: the last block (sorted to
// the back) may now be deletable.
5664 else if(m_HasEmptyBlock)
5666 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
5667 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
5669 pBlockToDelete = pLastBlock;
5670 m_Blocks.pop_back();
5671 m_HasEmptyBlock =
false;
5675 IncrementallySortBlocks();
// Destruction of VkDeviceMemory happens outside the mutex on purpose.
5680 if(pBlockToDelete != VMA_NULL)
5682 VMA_DEBUG_LOG(
" Deleted empty allocation");
5683 pBlockToDelete->Destroy(m_hAllocator);
5684 vma_delete(m_hAllocator, pBlockToDelete);
5688 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5690 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5692 if(m_Blocks[blockIndex] == pBlock)
5694 VmaVectorRemove(m_Blocks, blockIndex);
5701 void VmaBlockVector::IncrementallySortBlocks()
5704 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5706 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
5708 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// NOTE(review): error-return lines, vkMapMemory arguments and part of the
// Init() argument list are lost in this span; annotating only, code text left
// byte-identical.
// Allocates a new VkDeviceMemory of blockSize, optionally maps it (for MAPPED
// vectors when persistent mapping is currently enabled), wraps it in a
// VmaDeviceMemoryBlock and appends it to m_Blocks. Optionally reports the new
// block's index through pNewBlockIndex.
5714 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5716 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5717 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5718 allocInfo.allocationSize = blockSize;
5719 VkDeviceMemory mem = VK_NULL_HANDLE;
5720 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Map immediately when this is a persistently-mapped vector and mapping is
// not globally suspended (counter == 0).
5729 void* pMappedData = VMA_NULL;
5730 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
5731 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5733 res = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5734 m_hAllocator->m_hDevice,
// Mapping failure releases the just-allocated memory before bailing out.
5742 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5743 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5749 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5752 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5754 allocInfo.allocationSize,
5758 m_Blocks.push_back(pBlock);
5759 if(pNewBlockIndex != VMA_NULL)
5761 *pNewBlockIndex = m_Blocks.size() - 1;
// NOTE(review): BeginObject/EndObject calls and braces are lost in this span;
// annotating only, code text left byte-identical.
// Serializes this block vector into JSON: pool parameters for custom pools,
// preferred block size for default vectors, then every block's detailed map.
5767 #if VMA_STATS_STRING_ENABLED 5769 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5771 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch: emit the pool's configuration.
5777 json.WriteString(
"MemoryTypeIndex");
5778 json.WriteNumber(m_MemoryTypeIndex);
5780 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5782 json.WriteString(
"Mapped");
5783 json.WriteBool(
true);
5786 json.WriteString(
"BlockSize");
5787 json.WriteNumber(m_PreferredBlockSize);
5789 json.WriteString(
"BlockCount");
5790 json.BeginObject(
true);
5791 if(m_MinBlockCount > 0)
5793 json.WriteString(
"Min");
5794 json.WriteNumber(m_MinBlockCount);
5796 if(m_MaxBlockCount < SIZE_MAX)
5798 json.WriteString(
"Max");
5799 json.WriteNumber(m_MaxBlockCount);
5801 json.WriteString(
"Cur");
5802 json.WriteNumber(m_Blocks.size());
5805 if(m_FrameInUseCount > 0)
5807 json.WriteString(
"FrameInUseCount");
5808 json.WriteNumber(m_FrameInUseCount);
// Default-vector branch: only the preferred block size is emitted.
5813 json.WriteString(
"PreferredBlockSize");
5814 json.WriteNumber(m_PreferredBlockSize);
5817 json.WriteString(
"Blocks");
5819 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5821 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
5828 #endif // #if VMA_STATS_STRING_ENABLED 5830 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5832 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5834 for(
size_t i = m_Blocks.size(); i--; )
5836 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5837 if(pBlock->m_pMappedData != VMA_NULL)
5839 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5840 (m_hAllocator->GetVulkanFunctions().vkUnmapMemory)(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5841 pBlock->m_pMappedData = VMA_NULL;
// NOTE(review): the remaining vkMapMemory arguments and the final return are
// lost in this span; annotating only, code text left byte-identical.
// Re-maps every persistently-mapped block in this vector. Continues past
// individual failures, remembering the last failing VkResult as the overall
// outcome.
5846 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5848 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5850 VkResult finalResult = VK_SUCCESS;
5851 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5853 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5854 if(pBlock->m_PersistentMap)
// A persistently-mapped block must be unmapped at this point.
5856 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5857 VkResult localResult = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5858 m_hAllocator->m_hDevice,
5863 &pBlock->m_pMappedData);
5864 if(localResult != VK_SUCCESS)
5866 finalResult = localResult;
// NOTE(review): the vma_new constructor arguments are lost in this span;
// annotating only, code text left byte-identical.
// Lazily creates and returns this vector's defragmentator; subsequent calls
// return the same instance until DestroyDefragmentator() is called.
5873 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5874 VmaAllocator hAllocator,
5875 uint32_t currentFrameIndex)
5877 if(m_pDefragmentator == VMA_NULL)
5879 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5885 return m_pDefragmentator;
// NOTE(review): braces, stats-accumulation lines and the final return are
// lost in this span; annotating only, code text left byte-identical.
// Runs the defragmentator within the given byte/allocation budgets, folds the
// results into pDefragmentationStats, and frees any blocks that became empty
// (down to m_MinBlockCount).
5888 VkResult VmaBlockVector::Defragment(
5890 VkDeviceSize& maxBytesToMove,
5891 uint32_t& maxAllocationsToMove)
// No-op when no defragmentator was ever created for this vector.
5893 if(m_pDefragmentator == VMA_NULL)
5898 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5901 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5904 if(pDefragmentationStats != VMA_NULL)
5906 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5907 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must have respected the caller's budgets.
5910 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5911 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reap empty blocks (back to front); keep at most one as the spare.
5917 m_HasEmptyBlock =
false;
5918 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5920 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5921 if(pBlock->m_Metadata.IsEmpty())
5923 if(m_Blocks.size() > m_MinBlockCount)
5925 if(pDefragmentationStats != VMA_NULL)
5928 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
5931 VmaVectorRemove(m_Blocks, blockIndex);
5932 pBlock->Destroy(m_hAllocator);
5933 vma_delete(m_hAllocator, pBlock);
5937 m_HasEmptyBlock =
true;
5945 void VmaBlockVector::DestroyDefragmentator()
5947 if(m_pDefragmentator != VMA_NULL)
5949 vma_delete(m_hAllocator, m_pDefragmentator);
5950 m_pDefragmentator = VMA_NULL;
// NOTE(review): braces and, possibly, accumulation into pLostAllocationCount
// are lost in this span — the out-parameter is unused in the visible text;
// TODO confirm against upstream. Annotating only, code text left
// byte-identical.
// Forces every lost-capable allocation in every block of this pool to become
// lost, under the vector's mutex.
5954 void VmaBlockVector::MakePoolAllocationsLost(
5955 uint32_t currentFrameIndex,
5956 size_t* pLostAllocationCount)
5958 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5960 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5962 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5964 pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// NOTE(review): the local VmaStatInfo declaration and braces are lost in this
// span; annotating only, code text left byte-identical.
// Accumulates per-block statistics into pStats under three views: grand
// total, per memory type, and per memory heap.
5968 void VmaBlockVector::AddStats(
VmaStats* pStats)
5970 const uint32_t memTypeIndex = m_MemoryTypeIndex;
5971 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
5973 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5975 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5977 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5979 VMA_HEAVY_ASSERT(pBlock->Validate());
5981 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
5982 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5983 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5984 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructs a defragmentator bound to one block vector. Move counters start
// at zero and internal containers use the allocator's CPU allocation
// callbacks so all host memory goes through the user-supplied hooks.
5991 VmaDefragmentator::VmaDefragmentator(
5992 VmaAllocator hAllocator,
5993 VmaBlockVector* pBlockVector,
5994 uint32_t currentFrameIndex) :
5995 m_hAllocator(hAllocator),
5996 m_pBlockVector(pBlockVector),
5997 m_CurrentFrameIndex(currentFrameIndex),
// NOTE(review): the initializer on original line 5998 is missing from this
// extract — presumably m_BytesMoved(0); verify against upstream.
5999 m_AllocationsMoved(0),
6000 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6001 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6005 VmaDefragmentator::~VmaDefragmentator()
6007 for(
size_t i = m_Blocks.size(); i--; )
6009 vma_delete(m_hAllocator, m_Blocks[i]);
6013 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6015 AllocationInfo allocInfo;
6016 allocInfo.m_hAllocation = hAlloc;
6017 allocInfo.m_pChanged = pChanged;
6018 m_Allocations.push_back(allocInfo);
// Returns via *ppMappedData a CPU pointer covering the whole block, trying in
// order: a mapping already made for this defragmentation pass, the block's
// persistent mapping, and finally a fresh vkMapMemory whose pointer is cached
// in m_pMappedDataForDefragmentation (released later in Unmap()).
6021 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
// Reuse the mapping created earlier during this pass, if any.
6024 if(m_pMappedDataForDefragmentation)
6026 *ppMappedData = m_pMappedDataForDefragmentation;
// Persistently mapped blocks already expose a host pointer.
6031 if(m_pBlock->m_PersistentMap)
6033 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
6034 *ppMappedData = m_pBlock->m_pMappedData;
// NOTE(review): the vkMapMemory offset/size/flags arguments (original lines
// 6042-6044), the early `return VK_SUCCESS` statements, and the final error
// check of `res` are missing from this extract — presumably offset 0,
// VK_WHOLE_SIZE, flags 0; verify against upstream.
6039 VkResult res = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6040 hAllocator->m_hDevice,
6041 m_pBlock->m_hMemory,
6045 &m_pMappedDataForDefragmentation);
6046 *ppMappedData = m_pMappedDataForDefragmentation;
6050 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6052 if(m_pMappedDataForDefragmentation != VMA_NULL)
6054 (hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_pBlock->m_hMemory);
// One pass of the compaction algorithm: repeatedly takes the "last" candidate
// allocation (from the tail of the last block — blocks/allocations are
// pre-sorted so destinations come first) and tries to relocate it into an
// earlier block or lower offset. Stops with VK_INCOMPLETE when the byte or
// move-count budget would be exceeded.
// NOTE(review): several structural lines (braces, early returns, loop-advance
// logic, memcpy call head, some CreateAllocationRequest arguments) are
// missing from this extract; comments below describe visible statements only.
6058 VkResult VmaDefragmentator::DefragmentRound(
6059 VkDeviceSize maxBytesToMove,
6060 uint32_t maxAllocationsToMove)
6062 if(m_Blocks.empty())
// Start from the last block; SIZE_MAX forces the index-fixup loop below to
// select the last allocation of the chosen source block.
6067 size_t srcBlockIndex = m_Blocks.size() - 1;
6068 size_t srcAllocIndex = SIZE_MAX;
// Walk backwards to the nearest block that still has movable allocations.
6074 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6076 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6079 if(srcBlockIndex == 0)
6086 srcAllocIndex = SIZE_MAX;
6091 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6095 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6096 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6098 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6099 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6100 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6101 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every block up to and including the source block as a destination.
6104 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6106 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6107 VmaAllocationRequest dstAllocRequest;
6108 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6109 m_CurrentFrameIndex,
6110 m_pBlockVector->GetFrameInUseCount(),
6111 m_pBlockVector->GetBufferImageGranularity(),
6116 &dstAllocRequest) &&
6118 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Defragmentation must never sacrifice other allocations to make room.
6120 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: stop the round before exceeding either limit.
6123 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6124 (m_BytesMoved + size > maxBytesToMove))
6126 return VK_INCOMPLETE;
// Both source and destination must be host-mapped for the CPU copy.
6129 void* pDstMappedData = VMA_NULL;
6130 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6131 if(res != VK_SUCCESS)
6136 void* pSrcMappedData = VMA_NULL;
6137 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6138 if(res != VK_SUCCESS)
// CPU copy of the allocation's bytes to its new home (memcpy head line is
// missing from this extract).
6145 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6146 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6147 static_cast<size_t>(size));
// Commit the move in the metadata and retarget the allocation handle.
6149 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6150 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6152 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6154 if(allocInfo.m_pChanged != VMA_NULL)
6156 *allocInfo.m_pChanged = VK_TRUE;
6159 ++m_AllocationsMoved;
6160 m_BytesMoved += size;
6162 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the next candidate (previous allocation, or previous block).
6170 if(srcAllocIndex > 0)
6176 if(srcBlockIndex > 0)
6179 srcAllocIndex = SIZE_MAX;
// Entry point of defragmentation for one block vector: builds per-block
// bookkeeping (BlockInfo), buckets the registered (non-lost) allocations by
// their owning block via binary search, sorts blocks and allocations into
// move order, then runs up to two rounds of DefragmentRound() and finally
// unmaps any memory that was mapped for the copies.
// NOTE(review): braces and a few early-return lines are missing from this
// extract; comments describe the visible statements only.
6189 VkResult VmaDefragmentator::Defragment(
6190 VkDeviceSize maxBytesToMove,
6191 uint32_t maxAllocationsToMove)
6193 if(m_Allocations.empty())
// One BlockInfo per device memory block of the vector.
6199 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6200 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6202 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6203 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6204 m_Blocks.push_back(pBlockInfo);
// Sorted by block pointer so allocations can be bucketed by binary search.
6208 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6211 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6213 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Lost allocations no longer own memory, so they are skipped.
6215 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6217 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6218 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6219 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6221 (*it)->m_Allocations.push_back(allocInfo);
6229 m_Allocations.clear();
6231 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6233 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6234 pBlockInfo->CalcHasNonMovableAllocations();
// (Project API name carries an upstream typo: "Descecnding".)
6235 pBlockInfo->SortAllocationsBySizeDescecnding();
// Preferred move destinations come first after this sort.
6239 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
6242 VkResult result = VK_SUCCESS;
6243 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6245 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any temporary mappings made by EnsureMapping().
6249 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6251 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6257 bool VmaDefragmentator::MoveMakesSense(
6258 size_t dstBlockIndex, VkDeviceSize dstOffset,
6259 size_t srcBlockIndex, VkDeviceSize srcOffset)
6261 if(dstBlockIndex < srcBlockIndex)
6265 if(dstBlockIndex > srcBlockIndex)
6269 if(dstOffset < srcOffset)
// VmaAllocator_T constructor — the signature and the start of the member
// initializer list precede this extract. Body: zero-initializes callback and
// property tables, imports Vulkan function pointers (call not visible here),
// queries device/memory properties, applies optional per-heap size limits,
// and creates one VmaBlockVector plus one dedicated-allocation registry per
// (memory type, block vector type) pair.
6282 m_PhysicalDevice(pCreateInfo->physicalDevice),
6283 m_hDevice(pCreateInfo->device),
6284 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6285 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6286 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6287 m_UnmapPersistentlyMappedMemoryCounter(0),
6288 m_PreferredLargeHeapBlockSize(0),
6289 m_PreferredSmallHeapBlockSize(0),
6290 m_CurrentFrameIndex(0),
6291 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
6295 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6296 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6297 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6299 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6300 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no per-heap size limit.
6302 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6304 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6315 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6316 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply user-provided heap limits and clamp the reported heap sizes so the
// rest of the allocator observes the limited budget.
6325 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6327 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6328 if(limit != VK_WHOLE_SIZE)
6330 m_HeapSizeLimit[heapIndex] = limit;
6331 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6333 m_MemProps.memoryHeaps[heapIndex].size = limit;
// NOTE(review): several constructor-argument lines of the vma_new calls
// below are missing from this extract (original lines 6346-6356).
6339 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6341 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6343 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6345 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6348 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6352 GetBufferImageGranularity(),
6357 m_pDedicatedAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6362 VmaAllocator_T::~VmaAllocator_T()
6364 VMA_ASSERT(m_Pools.empty());
6366 for(
size_t i = GetMemoryTypeCount(); i--; )
6368 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6370 vma_delete(
this, m_pDedicatedAllocations[i][j]);
6371 vma_delete(
this, m_pBlockVectors[i][j]);
// Fills m_VulkanFunctions in three stages: statically linked entry points
// (when VMA_STATIC_VULKAN_FUNCTIONS == 1), then any non-null pointers the
// user supplied via pVulkanFunctions (these override the static ones), and
// finally asserts that every required entry point is present — the *2KHR
// pair only when KHR dedicated-allocation support is enabled.
6376 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6378 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6379 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6380 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6381 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6382 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6383 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6384 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6385 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6386 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6387 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6388 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6389 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6390 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6391 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6392 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// User-supplied pointers override the static ones; null entries are ignored.
6395 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6397 #define VMA_COPY_IF_NOT_NULL(funcName) \ 6398 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 6400 if(pVulkanFunctions != VMA_NULL)
6402 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6403 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6404 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6405 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6406 VMA_COPY_IF_NOT_NULL(vkMapMemory);
6407 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6408 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6409 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6410 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6411 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6412 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6413 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6414 VMA_COPY_IF_NOT_NULL(vkCreateImage);
6415 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6416 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6417 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// After import, every required function pointer must be non-null.
6420 #undef VMA_COPY_IF_NOT_NULL 6424 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6425 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6426 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6427 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6428 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6429 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6430 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6431 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6432 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6433 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6434 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6435 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6436 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6437 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6438 if(m_UseKhrDedicatedAllocation)
6440 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6441 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
6445 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6447 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6448 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6449 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6450 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific Vulkan memory type. Strategy: prefer a
// dedicated VkDeviceMemory when explicitly requested or when the request is
// large relative to the type's preferred block size (> half); otherwise
// sub-allocate from the type's block vector, falling back to dedicated
// memory if block allocation fails and new allocations are permitted.
// NOTE(review): several argument lists, flag checks, and control-flow lines
// (including `finalCreateInfo` setup and the `createInfo` parameter line)
// are missing from this extract; comments describe visible statements only.
6453 VkResult VmaAllocator_T::AllocateMemoryOfType(
6454 const VkMemoryRequirements& vkMemReq,
6455 bool dedicatedAllocation,
6456 VkBuffer dedicatedBuffer,
6457 VkImage dedicatedImage,
6459 uint32_t memTypeIndex,
6460 VmaSuballocationType suballocType,
6461 VmaAllocation* pAllocation)
6463 VMA_ASSERT(pAllocation != VMA_NULL);
6464 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapped vs. unmapped requests go to different block vectors of this type.
6466 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6467 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6468 VMA_ASSERT(blockVector);
// Heuristic: big requests (more than half a block) get their own memory.
6472 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6473 bool preferDedicatedMemory =
6474 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
6475 dedicatedAllocation ||
6477 vkMemReq.size > preferredBlockSize / 2;
6479 if(preferDedicatedMemory &&
6481 finalCreateInfo.
pool == VK_NULL_HANDLE)
// Persistently mapped dedicated memory requires a HOST_VISIBLE type.
6488 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6497 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6501 return AllocateDedicatedMemory(
// Normal path: sub-allocate from the block vector.
6514 VkResult res = blockVector->Allocate(
6516 m_CurrentFrameIndex.load(),
6521 if(res == VK_SUCCESS)
6529 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block allocation failed — try a dedicated allocation instead.
6533 res = AllocateDedicatedMemory(
6538 finalCreateInfo.pUserData,
6542 if(res == VK_SUCCESS)
6545 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
6551 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates one dedicated VkDeviceMemory object serving a single allocation.
// When the KHR dedicated-allocation path is active, chains a
// VkMemoryDedicatedAllocateInfoKHR naming the buffer or image. Optionally
// maps the memory persistently, then registers the new allocation in the
// per-type sorted dedicated-allocation list under its mutex.
// NOTE(review): some parameter lines (size, map, pUserData), early returns,
// and vkMapMemory arguments are missing from this extract.
6558 VkResult VmaAllocator_T::AllocateDedicatedMemory(
6560 VmaSuballocationType suballocType,
6561 uint32_t memTypeIndex,
6564 VkBuffer dedicatedBuffer,
6565 VkImage dedicatedImage,
6566 VmaAllocation* pAllocation)
6568 VMA_ASSERT(pAllocation);
6570 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6571 allocInfo.memoryTypeIndex = memTypeIndex;
6572 allocInfo.allocationSize = size;
// Chain dedicated-allocation info when the extension is in use. At most one
// of dedicatedBuffer / dedicatedImage may be set.
6574 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
6575 if(m_UseKhrDedicatedAllocation)
6577 if(dedicatedBuffer != VK_NULL_HANDLE)
6579 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
6580 dedicatedAllocInfo.buffer = dedicatedBuffer;
6581 allocInfo.pNext = &dedicatedAllocInfo;
6583 else if(dedicatedImage != VK_NULL_HANDLE)
6585 dedicatedAllocInfo.image = dedicatedImage;
6586 allocInfo.pNext = &dedicatedAllocInfo;
6591 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6592 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6595 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistent mapping is skipped while the global unmap counter is raised
// (see UnmapPersistentlyMappedMemory).
6599 void* pMappedData =
nullptr;
6602 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6604 res = (*m_VulkanFunctions.vkMapMemory)(
// On map failure the freshly allocated memory is released again.
6613 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6614 FreeVulkanMemory(memTypeIndex, size, hMemory);
6620 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6621 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
// Register in the sorted per-type list so FreeDedicatedMemory can find it.
6625 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6626 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6627 VMA_ASSERT(pDedicatedAllocations);
6628 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
6631 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
6636 void VmaAllocator_T::GetBufferMemoryRequirements(
6638 VkMemoryRequirements& memReq,
6639 bool& requiresDedicatedAllocation,
6640 bool& prefersDedicatedAllocation)
const 6642 if(m_UseKhrDedicatedAllocation)
6644 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
6645 memReqInfo.buffer = hBuffer;
6647 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6649 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6650 memReq2.pNext = &memDedicatedReq;
6652 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6654 memReq = memReq2.memoryRequirements;
6655 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
6656 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
6660 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
6661 requiresDedicatedAllocation =
false;
6662 prefersDedicatedAllocation =
false;
6666 void VmaAllocator_T::GetImageMemoryRequirements(
6668 VkMemoryRequirements& memReq,
6669 bool& requiresDedicatedAllocation,
6670 bool& prefersDedicatedAllocation)
const 6672 if(m_UseKhrDedicatedAllocation)
6674 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
6675 memReqInfo.image = hImage;
6677 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6679 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6680 memReq2.pNext = &memDedicatedReq;
6682 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6684 memReq = memReq2.memoryRequirements;
6685 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
6686 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
6690 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
6691 requiresDedicatedAllocation =
false;
6692 prefersDedicatedAllocation =
false;
// Top-level allocation dispatcher: validates mutually exclusive create flags
// (dedicated vs. never-allocate, pool vs. dedicated), routes pool-targeted
// requests to the pool's block vector, and otherwise iterates over memory
// types acceptable per vkMemReq.memoryTypeBits — retrying with the failed
// type masked out after each unsuccessful AllocateMemoryOfType attempt.
// NOTE(review): the createInfo parameter line, flag-test conditions, the
// vmaFindMemoryTypeIndex calls, and several argument lists are missing from
// this extract; comments describe the visible statements only.
6696 VkResult VmaAllocator_T::AllocateMemory(
6697 const VkMemoryRequirements& vkMemReq,
6698 bool requiresDedicatedAllocation,
6699 bool prefersDedicatedAllocation,
6700 VkBuffer dedicatedBuffer,
6701 VkImage dedicatedImage,
6703 VmaSuballocationType suballocType,
6704 VmaAllocation* pAllocation)
// Invalid flag combinations are programming errors: assert and fail.
6709 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6710 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6712 if(requiresDedicatedAllocation)
6716 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
6717 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6719 if(createInfo.
pool != VK_NULL_HANDLE)
6721 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
6722 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6725 if((createInfo.
pool != VK_NULL_HANDLE) &&
6728 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
6729 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool allocations bypass the type search entirely.
6732 if(createInfo.
pool != VK_NULL_HANDLE)
6734 return createInfo.
pool->m_BlockVector.Allocate(
6736 m_CurrentFrameIndex.load(),
// General path: find the best memory type, try it, and on failure mask it
// out of the candidate bits and search again.
6745 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6746 uint32_t memTypeIndex = UINT32_MAX;
6748 if(res == VK_SUCCESS)
6750 res = AllocateMemoryOfType(
6752 requiresDedicatedAllocation || prefersDedicatedAllocation,
6760 if(res == VK_SUCCESS)
// Remove the failed type from consideration before retrying.
6770 memoryTypeBits &= ~(1u << memTypeIndex);
6773 if(res == VK_SUCCESS)
6775 res = AllocateMemoryOfType(
6777 requiresDedicatedAllocation || prefersDedicatedAllocation,
6785 if(res == VK_SUCCESS)
6795 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its origin: block sub-allocations are freed back
// into the owning pool's (or the default) block vector, dedicated
// allocations release their own VkDeviceMemory. Allocations that are already
// lost skip the free path entirely. The handle object itself is always
// deleted at the end.
// NOTE(review): switch braces and `break` statements are missing from this
// extract.
6806 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6808 VMA_ASSERT(allocation);
// Lost allocations own no memory — only the handle needs deleting.
6810 if(allocation->CanBecomeLost() ==
false ||
6811 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6813 switch(allocation->GetType())
6815 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6817 VmaBlockVector* pBlockVector = VMA_NULL;
6818 VmaPool hPool = allocation->GetPool();
// Pool allocations free into the pool's own block vector.
6819 if(hPool != VK_NULL_HANDLE)
6821 pBlockVector = &hPool->m_BlockVector;
6825 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6826 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6827 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6829 pBlockVector->Free(allocation);
6832 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
6833 FreeDedicatedMemory(allocation);
6840 vma_delete(
this, allocation);
// Computes global statistics: initializes all buckets, then accumulates the
// default block vectors, the custom pools (under the pools mutex), and every
// dedicated allocation (under the per-type mutex), and finally derives the
// post-processed aggregate values for each bucket.
// NOTE(review): the InitStatInfo calls inside the first two loops, brace
// lines, and the `VmaStatInfo allocationStatInfo;` declaration used below
// are missing from this extract.
6843 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6846 InitStatInfo(pStats->
total);
6847 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6849 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors for every (type, vector-type) pair.
6853 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6855 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6856 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6858 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6859 VMA_ASSERT(pBlockVector);
6860 pBlockVector->AddStats(pStats);
// Custom pools, guarded by the pools mutex.
6866 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6867 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6869 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, guarded by the per-type mutex.
6874 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6876 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6877 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6878 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6880 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex][blockVectorType];
6881 VMA_ASSERT(pDedicatedAllocVector);
6882 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6885 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
6886 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6887 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6888 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/aggregates for every bucket.
6894 VmaPostprocessCalcStatInfo(pStats->
total);
6895 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6896 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6897 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6898 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (4098 == 0x1002). Used below to restrict the
// unmap/map-persistently-mapped-memory workaround to AMD hardware.
6901 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// First call of this nested counter unmaps every persistently mapped
// allocation in HOST_VISIBLE + DEVICE_LOCAL memory types — a workaround
// applied only on AMD GPUs (vendorID check against VMA_VENDOR_ID_AMD).
// Covers dedicated allocations, the default mapped block vectors, and all
// custom pools. Balanced by MapPersistentlyMappedMemory().
// NOTE(review): braces and the early-exit structure are missing from this
// extract; comments describe the visible statements only.
6903 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6905 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6907 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6909 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6911 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
// Only HOST_VISIBLE + DEVICE_LOCAL types are affected by the workaround.
6912 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6913 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6917 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6918 AllocationVectorType* pDedicatedAllocationsVector = m_pDedicatedAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6919 for(
size_t dedicatedAllocIndex = pDedicatedAllocationsVector->size(); dedicatedAllocIndex--; )
6921 VmaAllocation hAlloc = (*pDedicatedAllocationsVector)[dedicatedAllocIndex];
6922 hAlloc->DedicatedAllocUnmapPersistentlyMappedMemory(
this);
6928 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6929 pBlockVector->UnmapPersistentlyMappedMemory();
// Custom pools are processed under the pools mutex.
6936 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6937 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6939 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
// Inverse of UnmapPersistentlyMappedMemory(): when the nested counter drops
// back to zero, remaps all persistently mapped memory (AMD-only workaround).
// Aggregates mapping failures: the first non-success result is preserved in
// finalResult while the remaining memory is still processed.
// NOTE(review): braces and the final `return finalResult;` are missing from
// this extract.
6946 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6948 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6949 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6951 VkResult finalResult = VK_SUCCESS;
6952 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
// Custom pools first, under the pools mutex.
6956 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6957 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6959 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6963 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6965 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
// Same HOST_VISIBLE + DEVICE_LOCAL filter as the unmap side.
6966 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6967 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6971 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6972 AllocationVectorType* pAllocationsVector = m_pDedicatedAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6973 for(
size_t dedicatedAllocIndex = 0, dedicatedAllocCount = pAllocationsVector->size(); dedicatedAllocIndex < dedicatedAllocCount; ++dedicatedAllocIndex)
6975 VmaAllocation hAlloc = (*pAllocationsVector)[dedicatedAllocIndex];
6976 hAlloc->DedicatedAllocMapPersistentlyMappedMemory(
this);
6982 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6983 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6984 if(localResult != VK_SUCCESS)
6986 finalResult = localResult;
// Allocator-level defragmentation: registers each eligible allocation
// (block-type, HOST_VISIBLE memory, not lost) with a per-block-vector
// defragmentator, runs Defragment() on every affected default block vector
// and custom pool until the byte/move budgets are exhausted, and finally
// destroys all defragmentators. Holds the pools mutex throughout.
// NOTE(review): the pDefragmentationInfo/pDefragmentationStats parameter
// lines, the budget extraction from pDefragmentationInfo, maxBytesToMove
// argument lines, and the final return are missing from this extract.
6998 VkResult VmaAllocator_T::Defragment(
6999 VmaAllocation* pAllocations,
7000 size_t allocationCount,
7001 VkBool32* pAllocationsChanged,
7005 if(pAllocationsChanged != VMA_NULL)
// NOTE(review): this memset clears only sizeof(VkBool32) bytes; for an array
// of allocationCount flags the expected size would be
// allocationCount * sizeof(*pAllocationsChanged). Possible bug or
// extraction artifact — verify against upstream.
7007 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7009 if(pDefragmentationStats != VMA_NULL)
7011 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
// Defragmentation copies through mapped pointers, so it cannot run while
// persistent mappings are globally unmapped.
7014 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
7016 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
7017 return VK_ERROR_MEMORY_MAP_FAILED;
7020 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7022 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7024 const size_t poolCount = m_Pools.size();
// Phase 1: route each eligible allocation to its block vector's
// defragmentator (created on demand).
7027 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7029 VmaAllocation hAlloc = pAllocations[allocIndex];
7031 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
7033 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7035 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7037 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7039 VmaBlockVector* pAllocBlockVector =
nullptr;
7041 const VmaPool hAllocPool = hAlloc->GetPool();
7043 if(hAllocPool != VK_NULL_HANDLE)
7045 pAllocBlockVector = &hAllocPool->GetBlockVector();
7050 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
7053 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7055 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7056 &pAllocationsChanged[allocIndex] : VMA_NULL;
7057 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Phase 2: run defragmentation with the (possibly unlimited) budgets.
7061 VkResult result = VK_SUCCESS;
7065 VkDeviceSize maxBytesToMove = SIZE_MAX;
7066 uint32_t maxAllocationsToMove = UINT32_MAX;
7067 if(pDefragmentationInfo != VMA_NULL)
7074 for(uint32_t memTypeIndex = 0;
7075 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7079 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7081 for(uint32_t blockVectorType = 0;
7082 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
7085 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
7086 pDefragmentationStats,
7088 maxAllocationsToMove);
7094 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7096 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7097 pDefragmentationStats,
7099 maxAllocationsToMove);
// Phase 3: tear down all defragmentators, pools first.
7105 for(
size_t poolIndex = poolCount; poolIndex--; )
7107 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7111 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7113 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7115 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
7117 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
// Fills *pAllocationInfo. For allocations that can become lost it runs a
// compare-exchange loop on the last-use frame index: a lost allocation
// reports zeroed memory/offset, an up-to-date one reports real values, and
// an older index is bumped ("touched") to the current frame before
// reporting. Non-lost-capable allocations take the simple path at the end.
// NOTE(review): braces, the loop construct around the CAS, and the zeroed
// memoryType/deviceMemory/pMappedData assignments in the lost branch are
// missing from this extract.
7125 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7127 if(hAllocation->CanBecomeLost())
7133 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7134 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: the allocation owns no memory anymore; report neutral values.
7137 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7141 pAllocationInfo->
offset = 0;
7142 pAllocationInfo->
size = hAllocation->GetSize();
7144 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report real values without a CAS.
7147 else if(localLastUseFrameIndex == localCurrFrameIndex)
7149 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7150 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7151 pAllocationInfo->
offset = hAllocation->GetOffset();
7152 pAllocationInfo->
size = hAllocation->GetSize();
7153 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7154 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Older frame index: try to advance it to the current frame and retry.
7159 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7161 localLastUseFrameIndex = localCurrFrameIndex;
// Simple path for allocations that can never be lost.
7169 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7170 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7171 pAllocationInfo->
offset = hAllocation->GetOffset();
7172 pAllocationInfo->
size = hAllocation->GetSize();
7173 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7174 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom memory pool: constructs the VmaPool_T object, pre-creates
// its minimum number of blocks (destroying the pool again on failure), and
// registers it in the sorted m_Pools list under the pools mutex.
// NOTE(review): the construction of `newCreateInfo` (a validated copy of
// *pCreateInfo, original lines 7182-7191) and the final `return` lines are
// missing from this extract.
7178 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7180 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7193 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Pre-allocate the pool's minimum block count; undo creation on failure.
7195 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7196 if(res != VK_SUCCESS)
7198 vma_delete(
this, *pPool);
7205 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7206 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7212 void VmaAllocator_T::DestroyPool(VmaPool pool)
7216 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7217 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7218 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7221 vma_delete(
this, pool);
7224 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7226 pool->m_BlockVector.GetPoolStats(pPoolStats);
7229 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7231 m_CurrentFrameIndex.store(frameIndex);
7234 void VmaAllocator_T::MakePoolAllocationsLost(
7236 size_t* pLostAllocationCount)
7238 hPool->m_BlockVector.MakePoolAllocationsLost(
7239 m_CurrentFrameIndex.load(),
7240 pLostAllocationCount);
7243 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7245 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
7246 (*pAllocation)->InitLost();
// Central vkAllocateMemory wrapper. When the target heap has a size limit,
// the allocation is charged against the remaining budget under the limit
// mutex (failing with VK_ERROR_OUT_OF_DEVICE_MEMORY when exceeded);
// otherwise it allocates directly. On success the user's pfnAllocate
// callback is notified. Balanced by FreeVulkanMemory().
// NOTE(review): the declaration of `res` (presumably `VkResult res;`), some
// braces, and the final `return res;` are missing from this extract.
7249 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7251 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Budgeted path: check and charge the heap limit atomically.
7254 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7256 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7257 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7259 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7260 if(res == VK_SUCCESS)
7262 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7267 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unbudgeted path.
7272 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user's device-memory callback on success.
7275 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7277 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7283 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7285 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7287 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7290 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7292 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7293 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7295 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7296 m_HeapSizeLimit[heapIndex] += size;
7300 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7302 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7304 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7306 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7307 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex][allocation->GetBlockVectorType()];
7308 VMA_ASSERT(pDedicatedAllocations);
7309 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7310 VMA_ASSERT(success);
7313 VkDeviceMemory hMemory = allocation->GetMemory();
7315 if(allocation->GetMappedData() != VMA_NULL)
7317 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7320 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7322 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
7325 #if VMA_STATS_STRING_ENABLED 7327 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7329 bool dedicatedAllocationsStarted =
false;
7330 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7332 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7333 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7335 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex][blockVectorType];
7336 VMA_ASSERT(pDedicatedAllocVector);
7337 if(pDedicatedAllocVector->empty() ==
false)
7339 if(dedicatedAllocationsStarted ==
false)
7341 dedicatedAllocationsStarted =
true;
7342 json.WriteString(
"DedicatedAllocations");
7346 json.BeginString(
"Type ");
7347 json.ContinueString(memTypeIndex);
7348 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7350 json.ContinueString(
" Mapped");
7356 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7358 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7359 json.BeginObject(
true);
7361 json.WriteString(
"Size");
7362 json.WriteNumber(hAlloc->GetSize());
7364 json.WriteString(
"Type");
7365 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7374 if(dedicatedAllocationsStarted)
7380 bool allocationsStarted =
false;
7381 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7383 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7385 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
7387 if(allocationsStarted ==
false)
7389 allocationsStarted =
true;
7390 json.WriteString(
"DefaultPools");
7394 json.BeginString(
"Type ");
7395 json.ContinueString(memTypeIndex);
7396 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7398 json.ContinueString(
" Mapped");
7402 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
7406 if(allocationsStarted)
7413 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7414 const size_t poolCount = m_Pools.size();
7417 json.WriteString(
"Pools");
7419 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7421 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7428 #endif // #if VMA_STATS_STRING_ENABLED 7430 static VkResult AllocateMemoryForImage(
7431 VmaAllocator allocator,
7434 VmaSuballocationType suballocType,
7435 VmaAllocation* pAllocation)
7437 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7439 VkMemoryRequirements vkMemReq = {};
7440 bool requiresDedicatedAllocation =
false;
7441 bool prefersDedicatedAllocation =
false;
7442 allocator->GetImageMemoryRequirements(image, vkMemReq,
7443 requiresDedicatedAllocation, prefersDedicatedAllocation);
7445 return allocator->AllocateMemory(
7447 requiresDedicatedAllocation,
7448 prefersDedicatedAllocation,
7451 *pAllocationCreateInfo,
7461 VmaAllocator* pAllocator)
7463 VMA_ASSERT(pCreateInfo && pAllocator);
7464 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7470 VmaAllocator allocator)
7472 if(allocator != VK_NULL_HANDLE)
7474 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7475 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7476 vma_delete(&allocationCallbacks, allocator);
7481 VmaAllocator allocator,
7482 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7484 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7485 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7489 VmaAllocator allocator,
7490 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7492 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7493 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7497 VmaAllocator allocator,
7498 uint32_t memoryTypeIndex,
7499 VkMemoryPropertyFlags* pFlags)
7501 VMA_ASSERT(allocator && pFlags);
7502 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7503 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7507 VmaAllocator allocator,
7508 uint32_t frameIndex)
7510 VMA_ASSERT(allocator);
7511 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7513 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7515 allocator->SetCurrentFrameIndex(frameIndex);
7519 VmaAllocator allocator,
7522 VMA_ASSERT(allocator && pStats);
7523 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7524 allocator->CalculateStats(pStats);
7527 #if VMA_STATS_STRING_ENABLED 7530 VmaAllocator allocator,
7531 char** ppStatsString,
7532 VkBool32 detailedMap)
7534 VMA_ASSERT(allocator && ppStatsString);
7535 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7537 VmaStringBuilder sb(allocator);
7539 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7543 allocator->CalculateStats(&stats);
7545 json.WriteString(
"Total");
7546 VmaPrintStatInfo(json, stats.
total);
7548 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7550 json.BeginString(
"Heap ");
7551 json.ContinueString(heapIndex);
7555 json.WriteString(
"Size");
7556 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7558 json.WriteString(
"Flags");
7559 json.BeginArray(
true);
7560 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7562 json.WriteString(
"DEVICE_LOCAL");
7568 json.WriteString(
"Stats");
7569 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7572 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7574 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7576 json.BeginString(
"Type ");
7577 json.ContinueString(typeIndex);
7582 json.WriteString(
"Flags");
7583 json.BeginArray(
true);
7584 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7585 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7587 json.WriteString(
"DEVICE_LOCAL");
7589 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7591 json.WriteString(
"HOST_VISIBLE");
7593 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7595 json.WriteString(
"HOST_COHERENT");
7597 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7599 json.WriteString(
"HOST_CACHED");
7601 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7603 json.WriteString(
"LAZILY_ALLOCATED");
7609 json.WriteString(
"Stats");
7610 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7619 if(detailedMap == VK_TRUE)
7621 allocator->PrintDetailedMap(json);
7627 const size_t len = sb.GetLength();
7628 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7631 memcpy(pChars, sb.GetData(), len);
7634 *ppStatsString = pChars;
7638 VmaAllocator allocator,
7641 if(pStatsString != VMA_NULL)
7643 VMA_ASSERT(allocator);
7644 size_t len = strlen(pStatsString);
7645 vma_delete_array(allocator, pStatsString, len + 1);
7649 #endif // #if VMA_STATS_STRING_ENABLED 7654 VmaAllocator allocator,
7655 uint32_t memoryTypeBits,
7657 uint32_t* pMemoryTypeIndex)
7659 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7660 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7661 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7663 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7665 if(preferredFlags == 0)
7667 preferredFlags = requiredFlags;
7670 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7673 switch(pAllocationCreateInfo->
usage)
7678 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7681 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7684 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7685 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7688 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7689 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7695 *pMemoryTypeIndex = UINT32_MAX;
7696 uint32_t minCost = UINT32_MAX;
7697 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7698 memTypeIndex < allocator->GetMemoryTypeCount();
7699 ++memTypeIndex, memTypeBit <<= 1)
7702 if((memTypeBit & memoryTypeBits) != 0)
7704 const VkMemoryPropertyFlags currFlags =
7705 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7707 if((requiredFlags & ~currFlags) == 0)
7710 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7712 if(currCost < minCost)
7714 *pMemoryTypeIndex = memTypeIndex;
7724 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7728 VmaAllocator allocator,
7732 VMA_ASSERT(allocator && pCreateInfo && pPool);
7734 VMA_DEBUG_LOG(
"vmaCreatePool");
7736 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7738 return allocator->CreatePool(pCreateInfo, pPool);
7742 VmaAllocator allocator,
7745 VMA_ASSERT(allocator && pool);
7747 VMA_DEBUG_LOG(
"vmaDestroyPool");
7749 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7751 allocator->DestroyPool(pool);
7755 VmaAllocator allocator,
7759 VMA_ASSERT(allocator && pool && pPoolStats);
7761 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7763 allocator->GetPoolStats(pool, pPoolStats);
7767 VmaAllocator allocator,
7769 size_t* pLostAllocationCount)
7771 VMA_ASSERT(allocator && pool);
7773 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7775 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7779 VmaAllocator allocator,
7780 const VkMemoryRequirements* pVkMemoryRequirements,
7782 VmaAllocation* pAllocation,
7785 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7787 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7789 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7791 VkResult result = allocator->AllocateMemory(
7792 *pVkMemoryRequirements,
7798 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7801 if(pAllocationInfo && result == VK_SUCCESS)
7803 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7810 VmaAllocator allocator,
7813 VmaAllocation* pAllocation,
7816 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7818 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7820 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7822 VkMemoryRequirements vkMemReq = {};
7823 bool requiresDedicatedAllocation =
false;
7824 bool prefersDedicatedAllocation =
false;
7825 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
7826 requiresDedicatedAllocation,
7827 prefersDedicatedAllocation);
7829 VkResult result = allocator->AllocateMemory(
7831 requiresDedicatedAllocation,
7832 prefersDedicatedAllocation,
7836 VMA_SUBALLOCATION_TYPE_BUFFER,
7839 if(pAllocationInfo && result == VK_SUCCESS)
7841 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7848 VmaAllocator allocator,
7851 VmaAllocation* pAllocation,
7854 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7856 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7858 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7860 VkResult result = AllocateMemoryForImage(
7864 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7867 if(pAllocationInfo && result == VK_SUCCESS)
7869 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7876 VmaAllocator allocator,
7877 VmaAllocation allocation)
7879 VMA_ASSERT(allocator && allocation);
7881 VMA_DEBUG_LOG(
"vmaFreeMemory");
7883 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7885 allocator->FreeMemory(allocation);
7889 VmaAllocator allocator,
7890 VmaAllocation allocation,
7893 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7895 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7897 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7901 VmaAllocator allocator,
7902 VmaAllocation allocation,
7905 VMA_ASSERT(allocator && allocation);
7907 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7909 allocation->SetUserData(pUserData);
7913 VmaAllocator allocator,
7914 VmaAllocation* pAllocation)
7916 VMA_ASSERT(allocator && pAllocation);
7918 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7920 allocator->CreateLostAllocation(pAllocation);
7924 VmaAllocator allocator,
7925 VmaAllocation allocation,
7928 VMA_ASSERT(allocator && allocation && ppData);
7930 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7932 return (*allocator->GetVulkanFunctions().vkMapMemory)(
7933 allocator->m_hDevice,
7934 allocation->GetMemory(),
7935 allocation->GetOffset(),
7936 allocation->GetSize(),
7942 VmaAllocator allocator,
7943 VmaAllocation allocation)
7945 VMA_ASSERT(allocator && allocation);
7947 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7949 (*allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, allocation->GetMemory());
7954 VMA_ASSERT(allocator);
7956 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7958 allocator->UnmapPersistentlyMappedMemory();
7963 VMA_ASSERT(allocator);
7965 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7967 return allocator->MapPersistentlyMappedMemory();
7971 VmaAllocator allocator,
7972 VmaAllocation* pAllocations,
7973 size_t allocationCount,
7974 VkBool32* pAllocationsChanged,
7978 VMA_ASSERT(allocator && pAllocations);
7980 VMA_DEBUG_LOG(
"vmaDefragment");
7982 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7984 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
7988 VmaAllocator allocator,
7989 const VkBufferCreateInfo* pBufferCreateInfo,
7992 VmaAllocation* pAllocation,
7995 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7997 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7999 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8001 *pBuffer = VK_NULL_HANDLE;
8002 *pAllocation = VK_NULL_HANDLE;
8005 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8006 allocator->m_hDevice,
8008 allocator->GetAllocationCallbacks(),
8013 VkMemoryRequirements vkMemReq = {};
8014 bool requiresDedicatedAllocation =
false;
8015 bool prefersDedicatedAllocation =
false;
8016 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8017 requiresDedicatedAllocation, prefersDedicatedAllocation);
8020 res = allocator->AllocateMemory(
8022 requiresDedicatedAllocation,
8023 prefersDedicatedAllocation,
8026 *pAllocationCreateInfo,
8027 VMA_SUBALLOCATION_TYPE_BUFFER,
8032 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8033 allocator->m_hDevice,
8035 (*pAllocation)->GetMemory(),
8036 (*pAllocation)->GetOffset());
8040 if(pAllocationInfo != VMA_NULL)
8042 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8046 allocator->FreeMemory(*pAllocation);
8047 *pAllocation = VK_NULL_HANDLE;
8050 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8051 *pBuffer = VK_NULL_HANDLE;
8058 VmaAllocator allocator,
8060 VmaAllocation allocation)
8062 if(buffer != VK_NULL_HANDLE)
8064 VMA_ASSERT(allocator);
8066 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8068 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8070 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8072 allocator->FreeMemory(allocation);
8077 VmaAllocator allocator,
8078 const VkImageCreateInfo* pImageCreateInfo,
8081 VmaAllocation* pAllocation,
8084 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8086 VMA_DEBUG_LOG(
"vmaCreateImage");
8088 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8090 *pImage = VK_NULL_HANDLE;
8091 *pAllocation = VK_NULL_HANDLE;
8094 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8095 allocator->m_hDevice,
8097 allocator->GetAllocationCallbacks(),
8101 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8102 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8103 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8106 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8110 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8111 allocator->m_hDevice,
8113 (*pAllocation)->GetMemory(),
8114 (*pAllocation)->GetOffset());
8118 if(pAllocationInfo != VMA_NULL)
8120 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8124 allocator->FreeMemory(*pAllocation);
8125 *pAllocation = VK_NULL_HANDLE;
8128 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8129 *pImage = VK_NULL_HANDLE;
8136 VmaAllocator allocator,
8138 VmaAllocation allocation)
8140 if(image != VK_NULL_HANDLE)
8142 VMA_ASSERT(allocator);
8144 VMA_DEBUG_LOG(
"vmaDestroyImage");
8146 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8148 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8150 allocator->FreeMemory(allocation);
8154 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:551
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:768
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:511
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:576
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:496
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:561
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:677
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:490
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:962
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:508
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1115
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:832
+
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:742
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:555
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1027
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:573
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1180
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:897
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:886
-
Definition: vk_mem_alloc.h:741
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:479
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:774
-
Definition: vk_mem_alloc.h:687
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:523
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:951
+
Definition: vk_mem_alloc.h:806
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:544
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:839
+
Definition: vk_mem_alloc.h:752
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:588
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:570
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:505
-
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:520
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:635
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:570
+
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:585
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:691
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:756
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:635
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:493
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:634
-
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:501
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1119
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:700
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:558
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:699
+
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:566
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1184
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:540
-
VmaStatInfo total
Definition: vk_mem_alloc.h:644
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1127
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:757
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1110
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:494
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:415
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:514
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:840
-
Definition: vk_mem_alloc.h:834
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:972
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:605
+
VmaStatInfo total
Definition: vk_mem_alloc.h:709
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1192
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:822
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1175
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:559
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:480
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:579
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:905
+
Definition: vk_mem_alloc.h:899
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1037
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:491
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:776
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:856
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:892
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:556
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:841
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:921
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:957
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:477
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:843
+
Definition: vk_mem_alloc.h:542
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:908
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:672
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:737
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1105
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1170
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1123
-
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:683
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:492
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1188
+
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:748
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:557
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:640
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:421
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:705
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:486
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:442
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:507
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:447
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1125
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:512
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1190
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:768
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:902
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:833
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:967
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:487
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:623
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:851
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:434
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:552
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:688
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:916
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:499
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:748
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:636
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:438
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:846
-
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:686
+
Definition: vk_mem_alloc.h:813
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:701
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:503
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:911
+
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:751
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:763
-
Definition: vk_mem_alloc.h:754
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:626
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:489
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:864
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:526
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:895
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:828
+
Definition: vk_mem_alloc.h:819
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:691
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:554
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:929
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:591
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:960
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:752
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:781
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:817
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:846
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:558
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:642
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:635
-
Definition: vk_mem_alloc.h:814
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:498
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:436
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:497
+
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:623
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:707
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:700
+
Definition: vk_mem_alloc.h:879
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:563
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:501
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:562
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:878
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:943
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:983
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:517
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:635
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:632
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1048
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:582
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:700
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:697
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:883
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:967
-
Definition: vk_mem_alloc.h:750
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1121
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:485
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:948
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1032
+
Definition: vk_mem_alloc.h:815
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1186
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:550
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:500
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:630
-
No intended memory usage specified. Use other members of VmaAllocationCreateInfo to specify your requ...
Definition: vk_mem_alloc.h:675
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:836
+
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:565
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:695
+
No intended memory usage specified. Use other members of VmaAllocationCreateInfo to specify your requ...
Definition: vk_mem_alloc.h:740
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:901
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:628
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:495
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:499
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:714
-
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:680
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this allocation is not persistent...
Definition: vk_mem_alloc.h:978
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:693
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:560
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:564
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:779
+
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:745
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this allocation is not persistent...
Definition: vk_mem_alloc.h:1043
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:475
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:540
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:488
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:948
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:553
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1013
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:730
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:795
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:805
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:636
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:643
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:870
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:701
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:708
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:889
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:636
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:954
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:701
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:953
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1018