23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 161 #include <vulkan/vulkan.h> 168 VK_DEFINE_HANDLE(VmaAllocator)
172 VmaAllocator allocator,
174 VkDeviceMemory memory,
178 VmaAllocator allocator,
180 VkDeviceMemory memory,
235 VmaAllocator* pAllocator);
239 VmaAllocator allocator);
246 VmaAllocator allocator,
247 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
254 VmaAllocator allocator,
255 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
264 VmaAllocator allocator,
265 uint32_t memoryTypeIndex,
266 VkMemoryPropertyFlags* pFlags);
289 VmaAllocator allocator,
292 #define VMA_STATS_STRING_ENABLED 1 294 #if VMA_STATS_STRING_ENABLED 300 VmaAllocator allocator,
301 char** ppStatsString,
302 VkBool32 detailedMap);
305 VmaAllocator allocator,
308 #endif // #if VMA_STATS_STRING_ENABLED 403 VmaAllocator allocator,
404 uint32_t memoryTypeBits,
406 uint32_t* pMemoryTypeIndex);
415 VK_DEFINE_HANDLE(VmaAllocation)
466 VmaAllocator allocator,
467 const VkMemoryRequirements* pVkMemoryRequirements,
469 VmaAllocation* pAllocation,
479 VmaAllocator allocator,
482 VmaAllocation* pAllocation,
487 VmaAllocator allocator,
490 VmaAllocation* pAllocation,
495 VmaAllocator allocator,
496 VmaAllocation allocation);
500 VmaAllocator allocator,
501 VmaAllocation allocation,
506 VmaAllocator allocator,
507 VmaAllocation allocation,
519 VmaAllocator allocator,
520 VmaAllocation allocation,
524 VmaAllocator allocator,
525 VmaAllocation allocation);
647 VmaAllocator allocator,
648 VmaAllocation* pAllocations,
649 size_t allocationCount,
650 VkBool32* pAllocationsChanged,
677 VmaAllocator allocator,
678 const VkBufferCreateInfo* pCreateInfo,
681 VmaAllocation* pAllocation,
685 VmaAllocator allocator,
687 VmaAllocation allocation);
691 VmaAllocator allocator,
692 const VkImageCreateInfo* pCreateInfo,
695 VmaAllocation* pAllocation,
699 VmaAllocator allocator,
701 VmaAllocation allocation);
705 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 707 #ifdef VMA_IMPLEMENTATION 708 #undef VMA_IMPLEMENTATION 729 #if VMA_USE_STL_CONTAINERS 730 #define VMA_USE_STL_VECTOR 1 731 #define VMA_USE_STL_UNORDERED_MAP 1 732 #define VMA_USE_STL_LIST 1 735 #if VMA_USE_STL_VECTOR 739 #if VMA_USE_STL_UNORDERED_MAP 740 #include <unordered_map> 762 #define VMA_ASSERT(expr) assert(expr) 764 #define VMA_ASSERT(expr) 770 #ifndef VMA_HEAVY_ASSERT 772 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 774 #define VMA_HEAVY_ASSERT(expr) 780 #define VMA_NULL nullptr 784 #define VMA_ALIGN_OF(type) (__alignof(type)) 787 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 789 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 791 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 795 #ifndef VMA_SYSTEM_FREE 797 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 799 #define VMA_SYSTEM_FREE(ptr) free(ptr) 804 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 808 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 812 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 816 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 819 #ifndef VMA_DEBUG_LOG 820 #define VMA_DEBUG_LOG(format, ...) 830 #if VMA_STATS_STRING_ENABLED 831 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
833 _ultoa_s(num, outStr, strLen, 10);
835 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
837 _ui64toa_s(num, outStr, strLen, 10);
847 void Lock() { m_Mutex.lock(); }
848 void Unlock() { m_Mutex.unlock(); }
852 #define VMA_MUTEX VmaMutex 868 #define VMA_BEST_FIT (1) 871 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 876 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 879 #ifndef VMA_DEBUG_ALIGNMENT 884 #define VMA_DEBUG_ALIGNMENT (1) 887 #ifndef VMA_DEBUG_MARGIN 892 #define VMA_DEBUG_MARGIN (0) 895 #ifndef VMA_DEBUG_GLOBAL_MUTEX 900 #define VMA_DEBUG_GLOBAL_MUTEX (0) 903 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 908 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 911 #ifndef VMA_SMALL_HEAP_MAX_SIZE 912 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 916 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 917 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 921 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 922 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 930 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
931 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Population count (number of set bits) via the classic SWAR parallel reduction.
// Restored the missing `return c;` — the function previously fell off the end,
// which is undefined behavior when the result is used.
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Round val up to the nearest multiple of align.
// Requires align > 0; align is expected to be reached exactly (integer math).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T biased = val + align - 1;
    return biased - (biased % align);
}
// Division with rounding to nearest integer (half rounds up for positive x, y).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Lomuto-style partition used by VMA_SORT's fallback quicksort.
// Pivot is the last element; returns the pivot's final position.
// Restored the truncated interior (`if(insertIndex != i)`, `++insertIndex`)
// and the missing `return insertIndex;` — without the return the caller
// consumed an indeterminate iterator (UB).
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator i = beg; i < centerValue; ++i)
    {
        if(cmp(*i, *centerValue))
        {
            if(insertIndex != i)
            {
                VMA_SWAP(*i, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
984 template<
typename Iterator,
typename Compare>
985 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
989 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
990 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
991 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
995 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 997 #endif // #ifndef VMA_SORT 1006 static inline bool VmaBlocksOnSamePage(
1007 VkDeviceSize resourceAOffset,
1008 VkDeviceSize resourceASize,
1009 VkDeviceSize resourceBOffset,
1010 VkDeviceSize pageSize)
1012 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1013 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1014 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1015 VkDeviceSize resourceBStart = resourceBOffset;
1016 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1017 return resourceAEndPage == resourceBStartPage;
// Category of the resource occupying a suballocation. Used to decide whether
// two neighboring suballocations need bufferImageGranularity separation
// (see VmaIsBufferImageGranularityConflict).
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,            // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,         // resource kind not known — treated conservatively
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,   // image with unknown tiling
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
1037 static inline bool VmaIsBufferImageGranularityConflict(
1038 VmaSuballocationType suballocType1,
1039 VmaSuballocationType suballocType2)
1041 if(suballocType1 > suballocType2)
1043 VMA_SWAP(suballocType1, suballocType2);
1046 switch(suballocType1)
1048 case VMA_SUBALLOCATION_TYPE_FREE:
1050 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1052 case VMA_SUBALLOCATION_TYPE_BUFFER:
1054 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1055 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1056 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1058 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1059 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1060 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1061 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1063 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1064 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1076 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1077 m_pMutex(useMutex ? &mutex : VMA_NULL)
1094 VMA_MUTEX* m_pMutex;
1097 #if VMA_DEBUG_GLOBAL_MUTEX 1098 static VMA_MUTEX gDebugGlobalMutex;
1099 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex); 1101 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1105 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over sorted range [beg, end): returns iterator to the first
// element NOT less than key (i.e. lower_bound), or end when all are less.
// cmp(element, key) must return element < key.
// Restored the loop body and `return` that were lost in truncation.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
1138 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1140 if((pAllocationCallbacks != VMA_NULL) &&
1141 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1143 return (*pAllocationCallbacks->pfnAllocation)(
1144 pAllocationCallbacks->pUserData,
1147 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1151 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1155 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1157 if((pAllocationCallbacks != VMA_NULL) &&
1158 (pAllocationCallbacks->pfnFree != VMA_NULL))
1160 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1164 VMA_SYSTEM_FREE(ptr);
1168 template<
typename T>
1169 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1171 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1174 template<
typename T>
1175 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1177 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1180 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1182 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1184 template<
typename T>
1185 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1188 VmaFree(pAllocationCallbacks, ptr);
1191 template<
typename T>
1192 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1196 for(
size_t i = count; i--; )
1200 VmaFree(pAllocationCallbacks, ptr);
// Minimal STL-compatible allocator that routes all memory through the
// user-provided VkAllocationCallbacks (or the system fallback in VmaMalloc).
// Used to back VmaVector / VmaList / std containers in this file.
template<typename T>
class VmaStlAllocator
{
public:
    // Callbacks are shared by copy, never owned.
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    // Rebinding copy-constructor required by the Allocator concept.
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    // Two allocators compare equal iff they use the same callback set —
    // only then may memory from one be freed by the other.
    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    // Non-assignable: m_pCallbacks is const.
    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

// Insert item at position index. Free-function shim so the same call site
// works for both std::vector and the custom VmaVector build below.
template<typename T, typename allocatorT>
static void VectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}
// Remove the element at position index (std::vector flavor of the shim).
template<typename T, typename allocatorT>
static void VectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
1248 #else // #if VMA_USE_STL_VECTOR 1253 template<
typename T,
typename AllocatorT>
1257 VmaVector(
const AllocatorT& allocator) :
1258 m_Allocator(allocator),
1265 VmaVector(
size_t count,
const AllocatorT& allocator) :
1266 m_Allocator(allocator),
1267 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator->m_pCallbacks, count) : VMA_NULL),
1273 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1274 m_Allocator(src.m_Allocator),
1275 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src->m_pCallbacks, src.m_Count) : VMA_NULL),
1276 m_Count(src.m_Count),
1277 m_Capacity(src.m_Count)
1281 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1287 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1290 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1294 Resize(rhs.m_Count);
1297 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
// True when the vector holds no elements.
bool empty() const { return m_Count == 0; }
// Number of elements currently stored (not the capacity).
size_t size() const { return m_Count; }
// Raw pointer to contiguous storage; VMA_NULL when capacity is zero.
T* data() { return m_pArray; }
const T* data() const { return m_pArray; }
// Unchecked element access in release; heavy-assert bounds check in debug.
T& operator[](size_t index)
{
    VMA_HEAVY_ASSERT(index < m_Count);
    return m_pArray[index];
}
const T& operator[](size_t index) const
{
    VMA_HEAVY_ASSERT(index < m_Count);
    return m_pArray[index];
}
1321 VMA_HEAVY_ASSERT(m_Count > 0);
1324 const T& front()
const 1326 VMA_HEAVY_ASSERT(m_Count > 0);
1331 VMA_HEAVY_ASSERT(m_Count > 0);
1332 return m_pArray[m_Count - 1];
1334 const T& back()
const 1336 VMA_HEAVY_ASSERT(m_Count > 0);
1337 return m_pArray[m_Count - 1];
1340 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1342 newCapacity = VMA_MAX(newCapacity, m_Count);
1344 if((newCapacity < m_Capacity) && !freeMemory)
1346 newCapacity = m_Capacity;
1349 if(newCapacity != m_Capacity)
1351 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1354 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1356 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1357 m_Capacity = newCapacity;
1358 m_pArray = newArray;
1362 void resize(
size_t newCount,
bool freeMemory =
false)
1364 size_t newCapacity = m_Capacity;
1365 if(newCount > m_Capacity)
1367 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1371 newCapacity = newCount;
1374 if(newCapacity != m_Capacity)
1376 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1377 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1378 if(elementsToCopy != 0)
1380 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1382 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1383 m_Capacity = newCapacity;
1384 m_pArray = newArray;
// Drop all elements; releases the buffer only when freeMemory is true.
void clear(bool freeMemory = false)
{
    resize(0, freeMemory);
}
// Insert src at position index (0..size()), shifting the tail right by one.
// memmove is required: source and destination ranges overlap.
void insert(size_t index, const T& src)
{
    VMA_HEAVY_ASSERT(index <= m_Count);
    const size_t oldCount = size();
    resize(oldCount + 1);
    if(index < oldCount)
    {
        memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
    }
    m_pArray[index] = src;
}
// Remove the element at index, shifting the tail left by one
// (memmove: overlapping ranges), then shrink the count.
void remove(size_t index)
{
    VMA_HEAVY_ASSERT(index < m_Count);
    const size_t oldCount = size();
    if(index < oldCount - 1)
    {
        memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
    }
    resize(oldCount - 1);
}
// Append src at the end, growing storage via resize() as needed.
void push_back(const T& src)
{
    const size_t newIndex = size();
    resize(newIndex + 1);
    m_pArray[newIndex] = src;
}
1427 VMA_HEAVY_ASSERT(m_Count > 0);
1431 void push_front(
const T& src)
1438 VMA_HEAVY_ASSERT(m_Count > 0);
// Raw-pointer iterators over the contiguous storage.
typedef T* iterator;

iterator begin() { return m_pArray; }
iterator end() { return m_pArray + m_Count; }
1448 AllocatorT m_Allocator;
// VmaVector flavor of the shared VectorInsert shim (see std::vector version above).
template<typename T, typename allocatorT>
static void VectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}
1460 template<
typename T,
typename allocatorT>
1461 static void VectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
1466 #endif // #if VMA_USE_STL_VECTOR 1476 template<
typename T>
1477 class VmaPoolAllocator
1480 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
1481 ~VmaPoolAllocator();
1489 uint32_t NextFreeIndex;
1496 uint32_t FirstFreeIndex;
1499 const VkAllocationCallbacks* m_pAllocationCallbacks;
1500 size_t m_ItemsPerBlock;
1501 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
1503 ItemBlock& CreateNewBlock();
// Construct an empty pool; item blocks are created lazily on first Alloc().
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0); // a zero-sized block could never satisfy Alloc()
}
1515 template<
typename T>
1516 VmaPoolAllocator<T>::~VmaPoolAllocator()
// Destroy and free every item block (vma_delete_array runs Item destructors),
// then forget the block list. Any outstanding pointers become dangling.
template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}
// Hand out one item: scan blocks newest-to-oldest for a non-empty free list,
// popping its head; create a new block only when all are full.
template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // UINT32_MAX is the free-list end sentinel (see CreateNewBlock).
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex; // pop free-list head
            return &pItem->Value;
        }
    }

    // No free slot anywhere: allocate a new block and take its first item.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}
1551 template<
typename T>
1552 void VmaPoolAllocator<T>::Free(T* ptr)
1555 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
1557 ItemBlock& block = m_ItemBlocks[i];
1561 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
1564 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
1566 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
1567 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
1568 block.FirstFreeIndex = index;
1572 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
// Allocate a fresh block of m_ItemsPerBlock items and thread its free list:
// item i points to i+1, the last item holds the UINT32_MAX end sentinel.
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // newBlock.pItems aliases the heap array, so these writes are visible
    // through the copy stored inside m_ItemBlocks as well.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
1593 #if VMA_USE_STL_LIST 1595 #define VmaList std::list 1597 #else // #if VMA_USE_STL_LIST 1599 template<
typename T>
1608 template<
typename T>
1612 typedef VmaListItem<T> ItemType;
1614 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
1618 size_t GetCount()
const {
return m_Count; }
1619 bool IsEmpty()
const {
return m_Count == 0; }
1621 ItemType* Front() {
return m_pFront; }
1622 const ItemType* Front()
const {
return m_pFront; }
1623 ItemType* Back() {
return m_pBack; }
1624 const ItemType* Back()
const {
return m_pBack; }
1626 ItemType* PushBack();
1627 ItemType* PushFront();
1628 ItemType* PushBack(
const T& value);
1629 ItemType* PushFront(
const T& value);
1634 ItemType* InsertBefore(ItemType* pItem);
1636 ItemType* InsertAfter(ItemType* pItem);
1638 ItemType* InsertBefore(ItemType* pItem,
const T& value);
1639 ItemType* InsertAfter(ItemType* pItem,
const T& value);
1641 void Remove(ItemType* pItem);
1644 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
1645 VmaPoolAllocator<ItemType> m_ItemAllocator;
1651 VmaRawList(
const VmaRawList<T>& src);
1652 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
1655 template<
typename T>
1656 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
1657 m_pAllocationCallbacks(pAllocationCallbacks),
1658 m_ItemAllocator(pAllocationCallbacks, 128),
1665 template<
typename T>
1666 VmaRawList<T>::~VmaRawList()
1672 template<
typename T>
1673 void VmaRawList<T>::Clear()
1675 if(IsEmpty() ==
false)
1677 ItemType* pItem = m_pBack;
1678 while(pItem != VMA_NULL)
1680 ItemType*
const pPrevItem = pItem->pPrev;
1681 m_ItemAllocator.Free(pItem);
1684 m_pFront = VMA_NULL;
1690 template<
typename T>
1691 VmaListItem<T>* VmaRawList<T>::PushBack()
1693 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1694 pNewItem->pNext = VMA_NULL;
1697 pNewItem->pPrev = VMA_NULL;
1698 m_pFront = pNewItem;
1704 pNewItem->pPrev = m_pBack;
1705 m_pBack->pNext = pNewItem;
1712 template<
typename T>
1713 VmaListItem<T>* VmaRawList<T>::PushFront()
1715 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1716 pNewItem->pPrev = VMA_NULL;
1719 pNewItem->pNext = VMA_NULL;
1720 m_pFront = pNewItem;
1726 pNewItem->pNext = m_pFront;
1727 m_pFront->pPrev = pNewItem;
1728 m_pFront = pNewItem;
1734 template<
typename T>
1735 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
1737 ItemType*
const pNewItem = PushBack();
1738 pNewItem->Value = value;
1742 template<
typename T>
1743 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
1745 ItemType*
const pNewItem = PushFront();
1746 pNewItem->Value = value;
1750 template<
typename T>
1751 void VmaRawList<T>::PopBack()
1753 VMA_HEAVY_ASSERT(m_Count > 0);
1754 ItemType*
const pBackItem = m_pBack;
1755 ItemType*
const pPrevItem = pBackItem->pPrev;
1756 if(pPrevItem != VMA_NULL)
1758 pPrevItem->pNext = VMA_NULL;
1760 m_pBack = pPrevItem;
1761 m_ItemAllocator.Free(pBackItem);
1765 template<
typename T>
1766 void VmaRawList<T>::PopFront()
1768 VMA_HEAVY_ASSERT(m_Count > 0);
1769 ItemType*
const pFrontItem = m_pFront;
1770 ItemType*
const pNextItem = pFrontItem->pNext;
1771 if(pNextItem != VMA_NULL)
1773 pNextItem->pPrev = VMA_NULL;
1775 m_pFront = pNextItem;
1776 m_ItemAllocator.Free(pFrontItem);
1780 template<
typename T>
1781 void VmaRawList<T>::Remove(ItemType* pItem)
1783 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
1784 VMA_HEAVY_ASSERT(m_Count > 0);
1786 if(pItem->pPrev != VMA_NULL)
1788 pItem->pPrev->pNext = pItem->pNext;
1792 VMA_HEAVY_ASSERT(m_pFront == pItem);
1793 m_pFront = pItem->pNext;
1796 if(pItem->pNext != VMA_NULL)
1798 pItem->pNext->pPrev = pItem->pPrev;
1802 VMA_HEAVY_ASSERT(m_pBack == pItem);
1803 m_pBack = pItem->pPrev;
1806 m_ItemAllocator.Free(pItem);
1810 template<
typename T>
1811 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
1813 if(pItem != VMA_NULL)
1815 ItemType*
const prevItem = pItem->pPrev;
1816 ItemType*
const newItem = m_ItemAllocator.Alloc();
1817 newItem->pPrev = prevItem;
1818 newItem->pNext = pItem;
1819 pItem->pPrev = newItem;
1820 if(prevItem != VMA_NULL)
1822 prevItem->pNext = newItem;
1826 VMA_HEAVY_ASSERT(m_pFront == pItem);
1836 template<
typename T>
1837 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
1839 if(pItem != VMA_NULL)
1841 ItemType*
const nextItem = pItem->pNext;
1842 ItemType*
const newItem = m_ItemAllocator.Alloc();
1843 newItem->pNext = nextItem;
1844 newItem->pPrev = pItem;
1845 pItem->pNext = newItem;
1846 if(nextItem != VMA_NULL)
1848 nextItem->pPrev = newItem;
1852 VMA_HEAVY_ASSERT(m_pBack == pItem);
1862 template<
typename T>
1863 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
1865 ItemType*
const newItem = InsertBefore(pItem);
1866 newItem->Value = value;
1870 template<
typename T>
1871 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
1873 ItemType*
const newItem = InsertAfter(pItem);
1874 newItem->Value = value;
1878 template<
typename T,
typename AllocatorT>
1891 T& operator*()
const 1893 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1894 return m_pItem->Value;
1896 T* operator->()
const 1898 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1899 return &m_pItem->Value;
1902 iterator& operator++()
1904 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1905 m_pItem = m_pItem->pNext;
1908 iterator& operator--()
1910 if(m_pItem != VMA_NULL)
1912 m_pItem = m_pItem->pPrev;
1916 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
1917 m_pItem = m_pList->Back();
1922 iterator operator++(
int)
1924 iterator result = *
this;
1928 iterator operator--(
int)
1930 iterator result = *
this;
1935 bool operator==(
const iterator& rhs)
const 1937 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1938 return m_pItem == rhs.m_pItem;
1940 bool operator!=(
const iterator& rhs)
const 1942 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1943 return m_pItem != rhs.m_pItem;
1947 VmaRawList<T>* m_pList;
1948 VmaListItem<T>* m_pItem;
1950 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
1956 friend class VmaList<T, AllocatorT>;
1957 friend class VmaList<T, AllocatorT>:: const_iterator;
1960 class const_iterator
1969 const_iterator(
const iterator& src) :
1970 m_pList(src.m_pList),
1971 m_pItem(src.m_pItem)
1975 const T& operator*()
const 1977 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1978 return m_pItem->Value;
1980 const T* operator->()
const 1982 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1983 return &m_pItem->Value;
1986 const_iterator& operator++()
1988 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1989 m_pItem = m_pItem->pNext;
1992 const_iterator& operator--()
1994 if(m_pItem != VMA_NULL)
1996 m_pItem = m_pItem->pPrev;
2000 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2001 m_pItem = m_pList->Back();
2006 const_iterator operator++(
int)
2008 const_iterator result = *
this;
2012 const_iterator operator--(
int)
2014 const_iterator result = *
this;
2019 bool operator==(
const const_iterator& rhs)
const 2021 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2022 return m_pItem == rhs.m_pItem;
2024 bool operator!=(
const const_iterator& rhs)
const 2026 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2027 return m_pItem != rhs.m_pItem;
2031 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2037 const VmaRawList<T>* m_pList;
2038 const VmaListItem<T>* m_pItem;
2040 friend class VmaList<T, AllocatorT>;
2043 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2045 bool empty()
const {
return m_RawList.IsEmpty(); }
2046 size_t size()
const {
return m_RawList.GetCount(); }
2048 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2049 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2051 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2052 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2054 void clear() { m_RawList.Clear(); }
2055 void push_back(
const T& value) { m_RawList.PushBack(value); }
2056 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2057 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2060 VmaRawList<T> m_RawList;
2063 #endif // #if VMA_USE_STL_LIST 2068 #if VMA_USE_STL_UNORDERED_MAP 2070 #define VmaPair std::pair 2072 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2073 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2075 #else // #if VMA_USE_STL_UNORDERED_MAP 2077 template<
typename T1,
typename T2>
2083 VmaPair() : first(), second() { }
2084 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2090 template<
typename KeyT,
typename ValueT>
2094 typedef VmaPair<KeyT, ValueT> PairType;
2095 typedef PairType* iterator;
2097 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2099 iterator begin() {
return m_Vector.begin(); }
2100 iterator end() {
return m_Vector.end(); }
2102 void insert(
const PairType& pair);
2103 iterator find(
const KeyT& key);
2104 void erase(iterator it);
2107 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

// Orders VmaPairs by key only. The second overload lets binary search compare
// a stored pair directly against a bare key (heterogeneous comparison).
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
2125 template<
typename KeyT,
typename ValueT>
2126 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2128 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2130 m_Vector.data() + m_Vector.size(),
2132 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2133 VectorInsert(m_Vector, indexToInsert, pair);
2136 template<
typename KeyT,
typename ValueT>
2137 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2139 PairType* it = VmaBinaryFindFirstNotLess(
2141 m_Vector.data() + m_Vector.size(),
2143 VmaPairFirstLess<KeyT, ValueT>());
2144 if((it != m_Vector.end()) && (it->first == key))
2150 return m_Vector.end();
// Remove the element at iterator position it (tail shifts left by one).
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VectorRemove(m_Vector, it - m_Vector.begin());
}
2160 #endif // #if VMA_USE_STL_UNORDERED_MAP 2166 enum VMA_BLOCK_VECTOR_TYPE
2168 VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
2169 VMA_BLOCK_VECTOR_TYPE_MAPPED,
2170 VMA_BLOCK_VECTOR_TYPE_COUNT
2176 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2177 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2180 struct VmaAllocation_T
2183 enum ALLOCATION_TYPE
2185 ALLOCATION_TYPE_NONE,
2186 ALLOCATION_TYPE_BLOCK,
2187 ALLOCATION_TYPE_OWN,
2192 memset(
this, 0,
sizeof(VmaAllocation_T));
2195 void InitBlockAllocation(
2197 VkDeviceSize offset,
2198 VkDeviceSize alignment,
2200 VmaSuballocationType suballocationType,
2203 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2204 VMA_ASSERT(block != VMA_NULL);
2205 m_Type = ALLOCATION_TYPE_BLOCK;
2206 m_Alignment = alignment;
2208 m_pUserData = pUserData;
2209 m_SuballocationType = suballocationType;
2210 m_BlockAllocation.m_Block = block;
2211 m_BlockAllocation.m_Offset = offset;
2214 void ChangeBlockAllocation(
2216 VkDeviceSize offset)
2218 VMA_ASSERT(block != VMA_NULL);
2219 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2220 m_BlockAllocation.m_Block = block;
2221 m_BlockAllocation.m_Offset = offset;
2224 void InitOwnAllocation(
2225 uint32_t memoryTypeIndex,
2226 VkDeviceMemory hMemory,
2227 VmaSuballocationType suballocationType,
2233 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2234 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2235 m_Type = ALLOCATION_TYPE_OWN;
2238 m_pUserData = pUserData;
2239 m_SuballocationType = suballocationType;
2240 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2241 m_OwnAllocation.m_hMemory = hMemory;
2242 m_OwnAllocation.m_PersistentMap = persistentMap;
2243 m_OwnAllocation.m_pMappedData = pMappedData;
2246 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2247 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2248 VkDeviceSize GetSize()
const {
return m_Size; }
2249 void* GetUserData()
const {
return m_pUserData; }
2250 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2251 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
2253 VmaBlock* GetBlock()
const 2255 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2256 return m_BlockAllocation.m_Block;
2258 VkDeviceSize GetOffset()
const 2260 return (m_Type == ALLOCATION_TYPE_BLOCK) ? m_BlockAllocation.m_Offset : 0;
2262 VkDeviceMemory GetMemory()
const;
2263 uint32_t GetMemoryTypeIndex()
const;
2264 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2265 void* GetMappedData()
const;
2267 VkResult OwnAllocMapPersistentlyMappedMemory(VkDevice hDevice)
2269 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2270 if(m_OwnAllocation.m_PersistentMap)
2272 return vkMapMemory(hDevice, m_OwnAllocation.m_hMemory, 0, VK_WHOLE_SIZE, 0, &m_OwnAllocation.m_pMappedData);
2276 void OwnAllocUnmapPersistentlyMappedMemory(VkDevice hDevice)
2278 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2279 if(m_OwnAllocation.m_pMappedData)
2281 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
2282 vkUnmapMemory(hDevice, m_OwnAllocation.m_hMemory);
2283 m_OwnAllocation.m_pMappedData = VMA_NULL;
2288 VkDeviceSize m_Alignment;
2289 VkDeviceSize m_Size;
2291 ALLOCATION_TYPE m_Type;
2292 VmaSuballocationType m_SuballocationType;
2297 struct BlockAllocation
2300 VkDeviceSize m_Offset;
2301 } m_BlockAllocation;
2304 struct OwnAllocation
2306 uint32_t m_MemoryTypeIndex;
2307 VkDeviceMemory m_hMemory;
2308 bool m_PersistentMap;
2309 void* m_pMappedData;
2318 struct VmaSuballocation
2320 VkDeviceSize offset;
2322 VmaSuballocationType type;
2325 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
2328 struct VmaAllocationRequest
2330 VmaSuballocationList::iterator freeSuballocationItem;
2331 VkDeviceSize offset;
// --- Declaration of VmaBlock: one VkDeviceMemory chunk subdivided into a
// doubly-linked list of suballocations, plus a by-size index of free ranges.
2339 uint32_t m_MemoryTypeIndex;
2340 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
2341 VkDeviceMemory m_hMemory;
2342 VkDeviceSize m_Size;
2343 bool m_PersistentMap;
2344 void* m_pMappedData;
2345 uint32_t m_FreeCount;
2346 VkDeviceSize m_SumFreeSize;
// All suballocations, used and free, ordered by offset.
2347 VmaSuballocationList m_Suballocations;
// Iterators to free suballocations, sorted ascending by size (see
// RegisterFreeSuballocation); only ranges >= the registration threshold.
2350 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
2352 VmaBlock(VmaAllocator hAllocator);
// Destructor-time invariant: Destroy() must have been called first.
2356 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
2361 uint32_t newMemoryTypeIndex,
2362 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
2363 VkDeviceMemory newMemory,
2364 VkDeviceSize newSize,
2368 void Destroy(VmaAllocator allocator);
// Consistency check of all internal invariants (see definition below).
2371 bool Validate()
const;
// Searches for a place for an allocation; fills *pAllocationRequest and
// returns true on success. Does not modify the block.
2376 bool CreateAllocationRequest(
2377 VkDeviceSize bufferImageGranularity,
2378 VkDeviceSize allocSize,
2379 VkDeviceSize allocAlignment,
2380 VmaSuballocationType allocType,
2381 VmaAllocationRequest* pAllocationRequest);
// Checks whether a specific free suballocation can host the request,
// honoring alignment, margins, and bufferImageGranularity.
2385 bool CheckAllocation(
2386 VkDeviceSize bufferImageGranularity,
2387 VkDeviceSize allocSize,
2388 VkDeviceSize allocAlignment,
2389 VmaSuballocationType allocType,
2390 VmaSuballocationList::const_iterator freeSuballocItem,
2391 VkDeviceSize* pOffset)
const;
2394 bool IsEmpty()
const;
2399 const VmaAllocationRequest& request,
2400 VmaSuballocationType type,
2401 VkDeviceSize allocSize);
2404 void Free(
const VmaAllocation allocation);
2406 #if VMA_STATS_STRING_ENABLED 2407 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
2412 void MergeFreeWithNext(VmaSuballocationList::iterator item);
2415 void FreeSuballocation(VmaSuballocationList::iterator suballocItem);
2418 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
2421 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Strict-weak ordering for raw pointers; usable as a comparator with the
// sorted-vector helpers. (Body's return statement is on an elided line.)
2424 struct VmaPointerLess
2426 bool operator()(
const void* lhs,
const void* rhs)
const 2434 struct VmaBlockVector
// --- Declaration of VmaBlockVector: the set of VmaBlocks belonging to one
// (memory type, block-vector type) pair on the owning allocator.
2437 VmaVector< VmaBlock*, VmaStlAllocator<VmaBlock*> > m_Blocks;
2439 VmaBlockVector(VmaAllocator hAllocator);
2442 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Removes the block from the vector; does not destroy it.
2445 void Remove(VmaBlock* pBlock);
// One bubble-sort pass toward ordering blocks by ascending m_SumFreeSize
// (see the definition later in this file).
2449 void IncrementallySortBlocks();
2452 void AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const;
2454 #if VMA_STATS_STRING_ENABLED 2455 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
2458 void UnmapPersistentlyMappedMemory();
2459 VkResult MapPersistentlyMappedMemory();
// Back-pointer to the allocator that owns this vector.
2462 VmaAllocator m_hAllocator;
// --- Declaration of VmaAllocator_T: the main allocator object behind the
// opaque VmaAllocator handle. Holds per-memory-type block vectors, own
// (dedicated) allocation lists, and their mutexes.
2466 struct VmaAllocator_T
2470 bool m_AllocationCallbacksSpecified;
2471 VkAllocationCallbacks m_AllocationCallbacks;
2473 VkDeviceSize m_PreferredLargeHeapBlockSize;
2474 VkDeviceSize m_PreferredSmallHeapBlockSize;
// Counts nested Unmap/MapPersistentlyMappedMemory calls.
2477 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
2479 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
2480 VkPhysicalDeviceMemoryProperties m_MemProps;
// One block vector per (memory type, mapped/unmapped) combination.
2482 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
2486 bool m_HasEmptyBlock[VK_MAX_MEMORY_TYPES];
2487 VMA_MUTEX m_BlocksMutex[VK_MAX_MEMORY_TYPES];
// Dedicated (non-block) allocations, also per memory type and vector type.
2490 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
2491 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
2492 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user-supplied callbacks, or null to use the Vulkan defaults.
2497 const VkAllocationCallbacks* GetAllocationCallbacks()
const 2499 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
2502 VkDeviceSize GetPreferredBlockSize(uint32_t memTypeIndex)
const;
// Granularity used to keep linear and optimal resources apart (clamped
// below by the debug minimum).
2504 VkDeviceSize GetBufferImageGranularity()
const 2507 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
2508 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
2511 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
2512 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
// Main entry point used by the public vmaAllocateMemory* functions.
2515 VkResult AllocateMemory(
2516 const VkMemoryRequirements& vkMemReq,
2518 VmaSuballocationType suballocType,
2519 VmaAllocation* pAllocation);
2522 void FreeMemory(
const VmaAllocation allocation);
2524 void CalculateStats(
VmaStats* pStats);
2526 #if VMA_STATS_STRING_ENABLED 2527 void PrintDetailedMap(
class VmaStringBuilder& sb);
2530 void UnmapPersistentlyMappedMemory();
2531 VkResult MapPersistentlyMappedMemory();
2533 VkResult Defragment(
2534 VmaAllocation* pAllocations,
2535 size_t allocationCount,
2536 VkBool32* pAllocationsChanged,
2540 static void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
2543 VkPhysicalDevice m_PhysicalDevice;
// Allocation for a single, already-chosen memory type.
2545 VkResult AllocateMemoryOfType(
2546 const VkMemoryRequirements& vkMemReq,
2548 uint32_t memTypeIndex,
2549 VmaSuballocationType suballocType,
2550 VmaAllocation* pAllocation);
// Dedicated vkAllocateMemory path (no block suballocation).
2553 VkResult AllocateOwnMemory(
2555 VmaSuballocationType suballocType,
2556 uint32_t memTypeIndex,
2559 VmaAllocation* pAllocation);
2562 void FreeOwnMemory(VmaAllocation allocation);
2568 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
2570 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
2573 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
2575 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
2578 template<
typename T>
2579 static T* VmaAllocate(VmaAllocator hAllocator)
2581 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
2584 template<
typename T>
2585 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
2587 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Typed deallocation helpers, counterparts of VmaAllocate/VmaAllocateArray.
// NOTE(review): the explicit destructor-invocation lines (ptr->~T(), and the
// per-element destructor inside the array loop) fall on elided lines here —
// verify against the full file before assuming destructors are skipped.
2590 template<
typename T>
2591 static void vma_delete(VmaAllocator hAllocator, T* ptr)
2596 VmaFree(hAllocator, ptr);
2600 template<
typename T>
2601 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
// Destroys elements in reverse order (i = count; i--;), then frees storage.
2605 for(
size_t i = count; i--; )
2607 VmaFree(hAllocator, ptr);
// Minimal append-only string builder used to emit the JSON stats string.
// Backed by a VmaVector<char> so it uses the allocator's own callbacks.
2614 #if VMA_STATS_STRING_ENABLED 2616 class VmaStringBuilder
2619 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
2620 size_t GetLength()
const {
return m_Data.size(); }
// NOTE(review): the buffer is not NUL-terminated; callers must pair
// GetData() with GetLength().
2621 const char* GetData()
const {
return m_Data.data(); }
2623 void Add(
char ch) { m_Data.push_back(ch); }
2624 void Add(
const char* pStr);
2625 void AddNewLine() { Add(
'\n'); }
2626 void AddNumber(uint32_t num);
2627 void AddNumber(uint64_t num);
2628 void AddBool(
bool b) { Add(b ?
"true" :
"false"); }
2629 void AddNull() { Add(
"null"); }
// AddString emits a quoted, escaped JSON string; Add is a raw append.
2630 void AddString(
const char* pStr);
2633 VmaVector< char, VmaStlAllocator<char> > m_Data;
// --- Out-of-line VmaStringBuilder methods ---
// Raw append of a C string (no quoting/escaping).
2636 void VmaStringBuilder::Add(
const char* pStr)
2638 const size_t strLen = strlen(pStr);
2641 const size_t oldCount = m_Data.size();
2642 m_Data.resize(oldCount + strLen);
2643 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Decimal formatting via the VmaUint32ToStr/VmaUint64ToStr helpers into a
// stack buffer (buffer declaration lines are elided here).
2647 void VmaStringBuilder::AddNumber(uint32_t num)
2650 VmaUint32ToStr(buf,
sizeof(buf), num);
2654 void VmaStringBuilder::AddNumber(uint64_t num)
2657 VmaUint64ToStr(buf,
sizeof(buf), num);
// Quoted JSON string with per-character escaping; the switch over escape
// cases is elided, leaving only the unsupported-character assert.
2661 void VmaStringBuilder::AddString(
const char* pStr)
2664 const size_t strLen = strlen(pStr);
2665 for(
size_t i = 0; i < strLen; ++i)
2692 VMA_ASSERT(0 &&
"Character not currently supported.");
// --- VmaAllocation_T accessors: dispatch on m_Type between the block-backed
// and the dedicated ("own") representation.
2701 VkDeviceMemory VmaAllocation_T::GetMemory()
const 2703 return (m_Type == ALLOCATION_TYPE_BLOCK) ?
2704 m_BlockAllocation.m_Block->m_hMemory : m_OwnAllocation.m_hMemory;
2707 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 2709 return (m_Type == ALLOCATION_TYPE_BLOCK) ?
2710 m_BlockAllocation.m_Block->m_MemoryTypeIndex : m_OwnAllocation.m_MemoryTypeIndex;
// Own allocations derive their vector type from the persistent-map flag.
2713 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 2715 return (m_Type == ALLOCATION_TYPE_BLOCK) ?
2716 m_BlockAllocation.m_Block->m_BlockVectorType :
2717 (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
// Returns a pointer into the mapped block at this allocation's offset, the
// own allocation's mapped pointer, or (on elided fallthrough paths) null.
2720 void* VmaAllocation_T::GetMappedData()
const 2724 case ALLOCATION_TYPE_BLOCK:
2725 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
2727 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
2734 case ALLOCATION_TYPE_OWN:
2735 return m_OwnAllocation.m_pMappedData;
// Human-readable names for VmaSuballocationType, indexed by enum value
// (initializer list elided).
2743 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Serializes one VmaStatInfo as a JSON object; the sb.AddNumber(...) calls
// between the literals fall on elided lines.
2752 static void VmaPrintStatInfo(VmaStringBuilder& sb,
const VmaStatInfo& stat)
2754 sb.Add(
"{ \"Allocations\": ");
2756 sb.Add(
", \"Suballocations\": ");
2758 sb.Add(
", \"UnusedRanges\": ");
2760 sb.Add(
", \"UsedBytes\": ");
2762 sb.Add(
", \"UnusedBytes\": ");
2764 sb.Add(
", \"SuballocationSize\": { \"Min\": ");
2766 sb.Add(
", \"Avg\": ");
2768 sb.Add(
", \"Max\": ");
2770 sb.Add(
" }, \"UnusedRangeSize\": { \"Min\": ");
2772 sb.Add(
", \"Avg\": ");
2774 sb.Add(
", \"Max\": ");
// Comparator ordering suballocation-list iterators by the size of the range
// they point at; the second overload enables binary search against a raw
// VkDeviceSize key (heterogeneous lookup in VmaBinaryFindFirstNotLess).
2779 #endif // #if VMA_STATS_STRING_ENABLED 2781 struct VmaSuballocationItemSizeLess
2784 const VmaSuballocationList::iterator lhs,
2785 const VmaSuballocationList::iterator rhs)
const 2787 return lhs->size < rhs->size;
2790 const VmaSuballocationList::iterator lhs,
2791 VkDeviceSize rhsSize)
const 2793 return lhs->size < rhsSize;
// VmaBlock constructor: everything starts empty/invalid; real state is
// established later by Init(). Containers use the allocator's callbacks.
2797 VmaBlock::VmaBlock(VmaAllocator hAllocator) :
2798 m_MemoryTypeIndex(UINT32_MAX),
2799 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
2800 m_hMemory(VK_NULL_HANDLE),
2802 m_PersistentMap(false),
2803 m_pMappedData(VMA_NULL),
2806 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
2807 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
// Takes ownership of a freshly allocated VkDeviceMemory and initializes the
// block as one single free suballocation covering the whole size.
2811 void VmaBlock::Init(
2812 uint32_t newMemoryTypeIndex,
2813 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
2814 VkDeviceMemory newMemory,
2815 VkDeviceSize newSize,
// Must not already own memory (Init is one-shot after construction/Destroy).
2819 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
2821 m_MemoryTypeIndex = newMemoryTypeIndex;
2822 m_BlockVectorType = newBlockVectorType;
2823 m_hMemory = newMemory;
2825 m_PersistentMap = persistentMap;
2826 m_pMappedData = pMappedData;
2828 m_SumFreeSize = newSize;
2830 m_Suballocations.clear();
2831 m_FreeSuballocationsBySize.clear();
// Seed the list with one free range [0, newSize).
2833 VmaSuballocation suballoc = {};
2834 suballoc.offset = 0;
2835 suballoc.size = newSize;
2836 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2838 m_Suballocations.push_back(suballoc);
// NOTE(review): an iterator decrement (--suballocItem) appears to be on an
// elided line between these two — end() itself must not be registered.
2839 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
2841 m_FreeSuballocationsBySize.push_back(suballocItem);
// Releases the block's VkDeviceMemory: unmaps if mapped, notifies the user's
// pfnFree device-memory callback, then frees with the allocator's callbacks.
2844 void VmaBlock::Destroy(VmaAllocator allocator)
2846 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
2847 if(m_pMappedData != VMA_NULL)
2849 vkUnmapMemory(allocator->m_hDevice, m_hMemory);
2850 m_pMappedData = VMA_NULL;
// Inform the application before the memory disappears.
2854 if(allocator->m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
2856 (*allocator->m_DeviceMemoryCallbacks.pfnFree)(allocator, m_MemoryTypeIndex, m_hMemory, m_Size);
2859 vkFreeMemory(allocator->m_hDevice, m_hMemory, allocator->GetAllocationCallbacks());
2860 m_hMemory = VK_NULL_HANDLE;
// Full consistency check of the block's metadata. Walks the suballocation
// list verifying: contiguous offsets, no two adjacent free ranges, and
// recomputed free count / free size / registered-free count matching the
// cached fields. Returns false on the first violation.
2863 bool VmaBlock::Validate()
const 2865 if((m_hMemory == VK_NULL_HANDLE) ||
2867 m_Suballocations.empty())
// Recomputed-from-scratch counterparts of the cached members:
2873 VkDeviceSize calculatedOffset = 0;
2875 uint32_t calculatedFreeCount = 0;
2877 VkDeviceSize calculatedSumFreeSize = 0;
2880 size_t freeSuballocationsToRegister = 0;
// True when the previous suballocation was free (adjacent free ranges
// should have been merged and are therefore invalid).
2882 bool prevFree =
false;
2884 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
2885 suballocItem != m_Suballocations.cend();
2888 const VmaSuballocation& subAlloc = *suballocItem;
// Each suballocation must start exactly where the previous one ended.
2891 if(subAlloc.offset != calculatedOffset)
2896 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
2898 if(prevFree && currFree)
2902 prevFree = currFree;
2906 calculatedSumFreeSize += subAlloc.size;
2907 ++calculatedFreeCount;
// Only free ranges at or above the threshold are indexed by size.
2908 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2910 ++freeSuballocationsToRegister;
2914 calculatedOffset += subAlloc.size;
2919 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size index must reference only free ranges, in ascending size.
2924 VkDeviceSize lastSize = 0;
2925 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
2927 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
2930 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
2935 if(suballocItem->size < lastSize)
2940 lastSize = suballocItem->size;
// Final cross-check of the cached aggregates.
2945 (calculatedOffset == m_Size) &&
2946 (calculatedSumFreeSize == m_SumFreeSize) &&
2947 (calculatedFreeCount == m_FreeCount);
// Best-fit search for a place to put a new allocation. First binary-searches
// m_FreeSuballocationsBySize for the smallest registered free range that can
// fit, then (on an elided branch) falls back to scanning from largest down.
// On success fills *pAllocationRequest; never mutates the block.
2960 bool VmaBlock::CreateAllocationRequest(
2961 VkDeviceSize bufferImageGranularity,
2962 VkDeviceSize allocSize,
2963 VkDeviceSize allocAlignment,
2964 VmaSuballocationType allocType,
2965 VmaAllocationRequest* pAllocationRequest)
2967 VMA_ASSERT(allocSize > 0);
2968 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
2969 VMA_ASSERT(pAllocationRequest != VMA_NULL);
2970 VMA_HEAVY_ASSERT(Validate());
// Quick reject: total free space cannot possibly fit the request.
2973 if(m_SumFreeSize < allocSize)
2008 lines-note: (none)
3008 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
3009 if(freeSuballocCount > 0)
// Lower-bound by size, then probe candidates upward until one passes the
// full alignment/granularity check.
3014 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
3015 m_FreeSuballocationsBySize.data(),
3016 m_FreeSuballocationsBySize.data() + freeSuballocCount,
3018 VmaSuballocationItemSizeLess());
3019 size_t index = it - m_FreeSuballocationsBySize.data();
3020 for(; index < freeSuballocCount; ++index)
3022 VkDeviceSize offset = 0;
3023 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
3024 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
3026 pAllocationRequest->freeSuballocationItem = suballocItem;
3027 pAllocationRequest->offset = offset;
// Alternative strategy (elided guard): iterate from the largest free range
// downward.
3035 for(
size_t index = freeSuballocCount; index--; )
3037 VkDeviceSize offset = 0;
3038 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
3039 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
3041 pAllocationRequest->freeSuballocationItem = suballocItem;
3042 pAllocationRequest->offset = offset;
// Decides whether one specific free range can host the request. Computes the
// final offset by applying the debug margin, the alignment, and — when
// bufferImageGranularity > 1 — pushing the offset so a linear and an optimal
// resource never share a granularity page with a conflicting neighbor.
// Returns false if, after padding, the range is too small or a following
// resource would conflict.
3052 bool VmaBlock::CheckAllocation(
3053 VkDeviceSize bufferImageGranularity,
3054 VkDeviceSize allocSize,
3055 VkDeviceSize allocAlignment,
3056 VmaSuballocationType allocType,
3057 VmaSuballocationList::const_iterator freeSuballocItem,
3058 VkDeviceSize* pOffset)
const 3060 VMA_ASSERT(allocSize > 0);
3061 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
3062 VMA_ASSERT(freeSuballocItem != m_Suballocations.cend());
3063 VMA_ASSERT(pOffset != VMA_NULL);
3065 const VmaSuballocation& suballoc = *freeSuballocItem;
3066 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Cheap size reject before any offset math.
3069 if(suballoc.size < allocSize)
3075 *pOffset = suballoc.offset;
// Debug margin before the allocation (skipped at the very start of block).
3078 if((VMA_DEBUG_MARGIN > 0) && freeSuballocItem != m_Suballocations.cbegin())
3080 *pOffset += VMA_DEBUG_MARGIN;
3084 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
3085 *pOffset = VmaAlignUp(*pOffset, alignment);
// Walk backward: if a previous suballocation of a conflicting type ends on
// the same granularity page, bump the offset to the next page.
3089 if(bufferImageGranularity > 1)
3091 bool bufferImageGranularityConflict =
false;
3092 VmaSuballocationList::const_iterator prevSuballocItem = freeSuballocItem;
3093 while(prevSuballocItem != m_Suballocations.cbegin())
3096 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
3097 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
3099 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
3101 bufferImageGranularityConflict =
true;
3109 if(bufferImageGranularityConflict)
3111 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
// (review: trailing semicolon for the line above sits on an elided line)
;
3116 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
// End margin required only when another suballocation follows this one.
3119 VmaSuballocationList::const_iterator next = freeSuballocItem;
3121 const VkDeviceSize requiredEndMargin =
3122 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fit test with both paddings accounted for.
3125 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Walk forward: fail if a following suballocation of a conflicting type
// starts on the allocation's last granularity page.
3132 if(bufferImageGranularity > 1)
3134 VmaSuballocationList::const_iterator nextSuballocItem = freeSuballocItem;
3136 while(nextSuballocItem != m_Suballocations.cend())
3138 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
3139 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
3141 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
3159 bool VmaBlock::IsEmpty()
const 3161 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Commits a previously computed VmaAllocationRequest: carves allocSize bytes
// out of the chosen free range, re-inserting any leftover padding before and
// after as new free suballocations, and updates the cached aggregates.
3164 void VmaBlock::Alloc(
3165 const VmaAllocationRequest& request,
3166 VmaSuballocationType type,
3167 VkDeviceSize allocSize)
3169 VMA_ASSERT(request.freeSuballocationItem != m_Suballocations.end());
3170 VmaSuballocation& suballoc = *request.freeSuballocationItem;
3172 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
3174 VMA_ASSERT(request.offset >= suballoc.offset);
3175 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
3176 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
3177 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The range leaves the free-by-size index before it is repurposed.
3181 UnregisterFreeSuballocation(request.freeSuballocationItem);
3183 suballoc.offset = request.offset;
3184 suballoc.size = allocSize;
3185 suballoc.type = type;
// Trailing leftover becomes a new free range after the allocation.
3190 VmaSuballocation paddingSuballoc = {};
3191 paddingSuballoc.offset = request.offset + allocSize;
3192 paddingSuballoc.size = paddingEnd;
3193 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
3194 VmaSuballocationList::iterator next = request.freeSuballocationItem;
3196 const VmaSuballocationList::iterator paddingEndItem =
3197 m_Suballocations.insert(next, paddingSuballoc);
3198 RegisterFreeSuballocation(paddingEndItem);
// Leading leftover becomes a new free range before the allocation.
3204 VmaSuballocation paddingSuballoc = {};
3205 paddingSuballoc.offset = request.offset - paddingBegin;
3206 paddingSuballoc.size = paddingBegin;
3207 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
3208 const VmaSuballocationList::iterator paddingBeginItem =
3209 m_Suballocations.insert(request.freeSuballocationItem, paddingSuballoc);
3210 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; each nonzero padding adds one back (the
// increments sit on elided lines).
3214 m_FreeCount = m_FreeCount - 1;
3215 if(paddingBegin > 0)
3223 m_SumFreeSize -= allocSize;
// Marks a suballocation free and coalesces it with free neighbors so the
// list never contains two adjacent free ranges (the invariant Validate()
// checks). Re-registers the merged range in the by-size index.
3226 void VmaBlock::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
3229 VmaSuballocation& suballoc = *suballocItem;
3230 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
// Aggregate updates (the m_FreeCount increment is on an elided line).
3234 m_SumFreeSize += suballoc.size;
3237 bool mergeWithNext =
false;
3238 bool mergeWithPrev =
false;
// Peek at the following suballocation (the ++ sits on an elided line).
3240 VmaSuballocationList::iterator nextItem = suballocItem;
3242 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
3244 mergeWithNext =
true;
// Peek at the preceding suballocation.
3247 VmaSuballocationList::iterator prevItem = suballocItem;
3248 if(suballocItem != m_Suballocations.begin())
3251 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
3253 mergeWithPrev =
true;
// Merge order matters: fold the next range into this one, then fold this
// one into the previous, re-registering whichever range survives.
3259 UnregisterFreeSuballocation(nextItem);
3260 MergeFreeWithNext(suballocItem);
3265 UnregisterFreeSuballocation(prevItem);
3266 MergeFreeWithNext(prevItem);
3267 RegisterFreeSuballocation(prevItem);
3270 RegisterFreeSuballocation(suballocItem);
// Frees the suballocation backing `allocation` by locating it via its offset
// — a linear scan over the offset-ordered list. Asserts if not found.
3273 void VmaBlock::Free(
const VmaAllocation allocation)
3275 const VkDeviceSize allocationOffset = allocation->GetOffset();
3276 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
3277 suballocItem != m_Suballocations.end();
3280 VmaSuballocation& suballoc = *suballocItem;
// Offsets are unique within a block, so this match identifies it.
3281 if(suballoc.offset == allocationOffset)
3283 FreeSuballocation(suballocItem);
3284 VMA_HEAVY_ASSERT(Validate());
3288 VMA_ASSERT(0 &&
"Not found!");
// Serializes this block as a JSON object: totals plus the full ordered
// suballocation list. The first list element omits the leading comma.
3291 #if VMA_STATS_STRING_ENABLED 3293 void VmaBlock::PrintDetailedMap(
class VmaStringBuilder& sb)
const 3295 sb.Add(
"{\n\t\t\t\"Bytes\": ");
3296 sb.AddNumber(m_Size);
3297 sb.Add(
",\n\t\t\t\"FreeBytes\": ");
3298 sb.AddNumber(m_SumFreeSize);
3299 sb.Add(
",\n\t\t\t\"Suballocations\": ");
3300 sb.AddNumber(m_Suballocations.size());
3301 sb.Add(
",\n\t\t\t\"FreeSuballocations\": ");
3302 sb.AddNumber(m_FreeCount);
3303 sb.Add(
",\n\t\t\t\"SuballocationList\": [");
3306 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
3307 suballocItem != m_Suballocations.cend();
3308 ++suballocItem, ++i)
// Comma-separated: ",\n..." after the first element, "\n..." for the first.
3312 sb.Add(
",\n\t\t\t\t{ \"Type\": ");
3316 sb.Add(
"\n\t\t\t\t{ \"Type\": ");
3318 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
3319 sb.Add(
", \"Size\": ");
3320 sb.AddNumber(suballocItem->size);
3321 sb.Add(
", \"Offset\": ");
3322 sb.AddNumber(suballocItem->offset);
3326 sb.Add(
"\n\t\t\t]\n\t\t}");
// Merges `item` (free) with the free suballocation immediately after it:
// grows item by the neighbor's size and erases the neighbor.
// NOTE(review): the `++nextItem` advancing past `item` is on an elided line
// between 3336 and 3338 — without it these asserts would test `item` itself.
3329 #endif // #if VMA_STATS_STRING_ENABLED 3331 void VmaBlock::MergeFreeWithNext(VmaSuballocationList::iterator item)
3333 VMA_ASSERT(item != m_Suballocations.end());
3334 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
3336 VmaSuballocationList::iterator nextItem = item;
3338 VMA_ASSERT(nextItem != m_Suballocations.end());
3339 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
3341 item->size += nextItem->size;
3343 m_Suballocations.erase(nextItem);
// Inserts a free range into m_FreeSuballocationsBySize, keeping it sorted by
// ascending size. Ranges below the registration threshold are not indexed.
3346 void VmaBlock::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
3348 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
3349 VMA_ASSERT(item->size > 0);
3351 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
3353 if(m_FreeSuballocationsBySize.empty())
3355 m_FreeSuballocationsBySize.push_back(item);
// Binary search for the insertion point among same-or-larger sizes.
3359 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
3360 m_FreeSuballocationsBySize.data(),
3361 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
3363 VmaSuballocationItemSizeLess());
3364 size_t index = it - m_FreeSuballocationsBySize.data();
3365 VectorInsert(m_FreeSuballocationsBySize, index, item);
// Removes a free range from the by-size index. Binary-searches to the first
// entry of equal size, then scans forward through the run of equal-size
// entries for the exact iterator. Asserts if the item was never registered.
3370 void VmaBlock::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
3372 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
3373 VMA_ASSERT(item->size > 0);
// Below-threshold ranges were never indexed, so nothing to do.
3375 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
3377 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
3378 m_FreeSuballocationsBySize.data(),
3379 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
3381 VmaSuballocationItemSizeLess());
3382 for(
size_t index = it - m_FreeSuballocationsBySize.data();
3383 index < m_FreeSuballocationsBySize.size();
3386 if(m_FreeSuballocationsBySize[index] == item)
3388 VectorRemove(m_FreeSuballocationsBySize, index);
// While still inside the equal-size run the search may continue; leaving
// the run without a match means the index is corrupt.
3391 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
3393 VMA_ASSERT(0 &&
"Not found.");
// Fragment of VmaInitStatInfo (its signature line is elided): zero the
// output structure before accumulation.
3399 memset(&outInfo, 0,
sizeof(outInfo));
// Computes a VmaStatInfo for one block by walking its suballocations and
// classifying each as used (non-free) or unused; the per-range accumulation
// statements fall on elided lines.
3404 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaBlock& alloc)
3408 const uint32_t rangeCount = (uint32_t)alloc.m_Suballocations.size();
3420 for(VmaSuballocationList::const_iterator suballocItem = alloc.m_Suballocations.cbegin();
3421 suballocItem != alloc.m_Suballocations.cend();
3424 const VmaSuballocation& suballoc = *suballocItem;
3425 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Finalizes aggregate fields (e.g. averages) after accumulation; body elided.
3452 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaBlockVector lifetime: the vector owns its blocks — the destructor
// destroys each block's device memory and deletes the object.
3460 VmaBlockVector::VmaBlockVector(VmaAllocator hAllocator) :
3461 m_hAllocator(hAllocator),
3462 m_Blocks(VmaStlAllocator<VmaBlock*>(hAllocator->GetAllocationCallbacks()))
3466 VmaBlockVector::~VmaBlockVector()
3468 for(
size_t i = m_Blocks.size(); i--; )
3470 m_Blocks[i]->Destroy(m_hAllocator);
3471 vma_delete(m_hAllocator, m_Blocks[i]);
// Detach (but do not destroy) a block; linear search by pointer identity.
3475 void VmaBlockVector::Remove(VmaBlock* pBlock)
3477 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
3479 if(m_Blocks[blockIndex] == pBlock)
3481 VectorRemove(m_Blocks, blockIndex)
// (review: a break/return after removal sits on an elided line)
;
// One incremental bubble-sort step: swaps the first adjacent pair found out
// of order so blocks drift toward ascending free-size over repeated calls.
3488 void VmaBlockVector::IncrementallySortBlocks()
3491 for(
size_t i = 1; i < m_Blocks.size(); ++i)
3493 if(m_Blocks[i - 1]->m_SumFreeSize > m_Blocks[i]->m_SumFreeSize)
3495 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Emits each contained block's detailed JSON map in sequence.
3501 #if VMA_STATS_STRING_ENABLED 3503 void VmaBlockVector::PrintDetailedMap(
class VmaStringBuilder& sb)
const 3505 for(
size_t i = 0; i < m_Blocks.size(); ++i)
3515 m_Blocks[i]->PrintDetailedMap(sb);
// Unmaps every persistently-mapped block (used when the app signals that
// device memory may be relocated, e.g. around lost-device handling).
3519 #endif // #if VMA_STATS_STRING_ENABLED 3521 void VmaBlockVector::UnmapPersistentlyMappedMemory()
3523 for(
size_t i = m_Blocks.size(); i--; )
3525 VmaBlock* pBlock = m_Blocks[i];
3526 if(pBlock->m_pMappedData != VMA_NULL)
3528 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
3529 vkUnmapMemory(m_hAllocator->m_hDevice, pBlock->m_hMemory);
3530 pBlock->m_pMappedData = VMA_NULL;
// Re-maps every block flagged persistent. Continues past individual
// failures, returning the last non-success result so all blocks get a try.
3535 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
3537 VkResult finalResult = VK_SUCCESS;
3538 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
3540 VmaBlock* pBlock = m_Blocks[i];
3541 if(pBlock->m_PersistentMap)
3543 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
3544 VkResult localResult = vkMapMemory(m_hAllocator->m_hDevice, pBlock->m_hMemory, 0, VK_WHOLE_SIZE, 0, &pBlock->m_pMappedData);
3545 if(localResult != VK_SUCCESS)
3547 finalResult = localResult;
// Accumulates this vector's per-block statistics into the caller's VmaStats
// at three levels: grand total, per memory type, and per memory heap.
3554 void VmaBlockVector::AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const 3556 for(uint32_t allocIndex = 0; allocIndex < m_Blocks.size(); ++allocIndex)
3558 const VmaBlock*
const pBlock = m_Blocks[allocIndex];
3560 VMA_HEAVY_ASSERT(pBlock->Validate());
3562 CalcAllocationStatInfo(allocationStatInfo, *pBlock);
3563 VmaAddStatInfo(pStats->
total, allocationStatInfo);
3564 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
3565 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Moves allocations between blocks of one block vector to reduce
// fragmentation: data is memmoved inside mapped memory and each moved
// allocation is rebound to its new block/offset.
3572 class VmaDefragmentator
3575 const VkAllocationCallbacks* m_pAllocationCallbacks;
3576 VkDeviceSize m_BufferImageGranularity;
3577 uint32_t m_MemTypeIndex;
3578 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
// Progress counters reported back through GetBytesMoved/GetAllocationsMoved.
3579 VkDeviceSize m_BytesMoved;
3580 uint32_t m_AllocationsMoved;
// One candidate allocation plus the caller's optional "was moved" flag.
3582 struct AllocationInfo
3584 VmaAllocation m_hAllocation;
3585 VkBool32* m_pChanged;
3588 m_hAllocation(VK_NULL_HANDLE),
3589 m_pChanged(VMA_NULL)
3594 struct AllocationInfoSizeGreater
3596 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3598 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
// Candidates as submitted via AddAllocation, before being distributed to
// per-block lists in Defragment().
3603 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// --- nested struct BlockInfo (its header lines are elided) ---
3608 bool m_HasNonMovableAllocations;
3609 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3611 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3613 m_HasNonMovableAllocations(true),
3614 m_Allocations(pAllocationCallbacks),
3615 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when the app registered fewer
// candidates than the block actually contains.
3619 void CalcHasNonMovableAllocations()
3621 const size_t blockAllocCount =
3622 m_pBlock->m_Suballocations.size() - m_pBlock->m_FreeCount;
3623 const size_t defragmentAllocCount = m_Allocations.size();
3624 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (sic: "Descecnding" — the caller below uses the same spelling, so the
// name is left unchanged to keep the interface intact.)
3627 void SortAllocationsBySizeDescecnding()
3629 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
// Returns a CPU pointer to the block: reuses a persistent mapping when
// present, otherwise maps temporarily for the defragmentation pass.
3632 VkResult EnsureMapping(VkDevice hDevice,
void** ppMappedData)
3635 if(m_pMappedDataForDefragmentation)
3637 *ppMappedData = m_pMappedDataForDefragmentation;
3642 if(m_pBlock->m_PersistentMap)
3644 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
3645 *ppMappedData = m_pBlock->m_pMappedData;
3650 VkResult res = vkMapMemory(hDevice, m_pBlock->m_hMemory, 0, VK_WHOLE_SIZE, 0, &m_pMappedDataForDefragmentation);
3651 *ppMappedData = m_pMappedDataForDefragmentation;
// Unmaps only mappings created by EnsureMapping, never persistent ones.
3655 void Unmap(VkDevice hDevice)
3657 if(m_pMappedDataForDefragmentation != VMA_NULL)
3659 vkUnmapMemory(hDevice, m_pBlock->m_hMemory);
3665 void* m_pMappedDataForDefragmentation;
// Comparators for sorting/searching BlockInfo* by underlying block pointer.
3668 struct BlockPointerLess
3670 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaBlock* pRhsBlock)
const 3672 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3674 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3676 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Move-destination preference: fully-movable blocks first, then by free
// size (remaining tie-break branches are on elided lines).
3682 struct BlockInfoCompareMoveDestination
3684 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3686 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3690 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3694 if(pLhsBlockInfo->m_pBlock->m_SumFreeSize < pRhsBlockInfo->m_pBlock->m_SumFreeSize)
3702 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3703 BlockInfoVector m_Blocks;
3705 VkResult DefragmentRound(
3706 VkDeviceSize maxBytesToMove,
3707 uint32_t maxAllocationsToMove);
3709 static bool MoveMakesSense(
3710 size_t dstBlockIndex, VkDeviceSize dstOffset,
3711 size_t srcBlockIndex, VkDeviceSize srcOffset);
3716 const VkAllocationCallbacks* pAllocationCallbacks,
3717 VkDeviceSize bufferImageGranularity,
3718 uint32_t memTypeIndex,
3719 VMA_BLOCK_VECTOR_TYPE blockVectorType);
3721 ~VmaDefragmentator();
3723 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3724 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3726 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3728 VkResult Defragment(
3729 VmaBlockVector* pBlockVector,
3730 VkDeviceSize maxBytesToMove,
3731 uint32_t maxAllocationsToMove);
// Defragmentator setup: stores the target memory type / vector type and
// wires all containers to the provided allocation callbacks.
3734 VmaDefragmentator::VmaDefragmentator(
3736 const VkAllocationCallbacks* pAllocationCallbacks,
3737 VkDeviceSize bufferImageGranularity,
3738 uint32_t memTypeIndex,
3739 VMA_BLOCK_VECTOR_TYPE blockVectorType) :
3741 m_pAllocationCallbacks(pAllocationCallbacks),
3742 m_BufferImageGranularity(bufferImageGranularity),
3743 m_MemTypeIndex(memTypeIndex),
3744 m_BlockVectorType(blockVectorType),
3746 m_AllocationsMoved(0),
3747 m_Allocations(VmaStlAllocator<AllocationInfo>(pAllocationCallbacks)),
3748 m_Blocks(VmaStlAllocator<BlockInfo*>(pAllocationCallbacks))
// Owns the BlockInfo objects created in Defragment(); delete them all.
3752 VmaDefragmentator::~VmaDefragmentator()
3754 for(
size_t i = m_Blocks.size(); i--; )
3756 vma_delete(m_pAllocationCallbacks, m_Blocks[i]);
3760 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
3762 AllocationInfo allocInfo;
3763 allocInfo.m_hAllocation = hAlloc;
3764 allocInfo.m_pChanged = pChanged;
3765 m_Allocations.push_back(allocInfo);
// One pass of the move loop: walks candidate allocations from the last block
// backward and, for each, tries every earlier (or same) block as a
// destination. Moves data via memcpy inside mapped memory, rebinds the
// allocation, and stops with VK_INCOMPLETE when either budget is exhausted.
3768 VkResult VmaDefragmentator::DefragmentRound(
3769 VkDeviceSize maxBytesToMove,
3770 uint32_t maxAllocationsToMove)
3772 if(m_Blocks.empty())
// Cursor over (block, allocation); SIZE_MAX means "start at the block's
// last allocation".
3777 size_t srcBlockIndex = m_Blocks.size() - 1;
3778 size_t srcAllocIndex = SIZE_MAX;
// Advance the cursor to the next existing source allocation, walking to
// earlier blocks when the current one is exhausted.
3784 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
3786 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
3789 if(srcBlockIndex == 0)
3796 srcAllocIndex = SIZE_MAX;
3801 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
3805 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
3806 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
3808 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
3809 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
3810 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
3811 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destinations in preference order (m_Blocks was pre-sorted).
3814 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
3816 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
3817 VmaAllocationRequest dstAllocRequest;
3818 if(pDstBlockInfo->m_pBlock->CreateAllocationRequest(
3819 m_BufferImageGranularity,
3823 &dstAllocRequest) &&
3825 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Respect the caller's limits before committing anything.
3828 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
3829 (m_BytesMoved + size > maxBytesToMove))
3831 return VK_INCOMPLETE;
// Both blocks must be CPU-visible for the copy.
3834 void* pDstMappedData = VMA_NULL;
3835 VkResult res = pDstBlockInfo->EnsureMapping(m_hDevice, &pDstMappedData);
3836 if(res != VK_SUCCESS)
3841 void* pSrcMappedData = VMA_NULL;
3842 res = pSrcBlockInfo->EnsureMapping(m_hDevice, &pSrcMappedData);
3843 if(res != VK_SUCCESS)
// Copy payload, commit the destination range, release the source range.
3850 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
3851 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
3854 pDstBlockInfo->m_pBlock->Alloc(dstAllocRequest, suballocType, size);
3855 pSrcBlockInfo->m_pBlock->Free(allocInfo.m_hAllocation);
3857 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
3859 if(allocInfo.m_pChanged != VMA_NULL)
3861 *allocInfo.m_pChanged = VK_TRUE;
3864 ++m_AllocationsMoved;
3865 m_BytesMoved += size;
3867 VectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// No destination found for this allocation: step the cursor backward.
3875 if(srcAllocIndex > 0)
3881 if(srcBlockIndex > 0)
3884 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block candidate lists from
// the registered allocations, sorts blocks into destination-preference
// order, runs up to two DefragmentRound passes, then unmaps any temporary
// mappings.
3894 VkResult VmaDefragmentator::Defragment(
3895 VmaBlockVector* pBlockVector,
3896 VkDeviceSize maxBytesToMove,
3897 uint32_t maxAllocationsToMove)
3899 if(m_Allocations.empty())
// Mirror every block of the vector with a BlockInfo record.
3905 const size_t blockCount = pBlockVector->m_Blocks.size();
3906 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
3908 BlockInfo* pBlockInfo = vma_new(m_pAllocationCallbacks, BlockInfo)(m_pAllocationCallbacks);
3909 pBlockInfo->m_pBlock = pBlockVector->m_Blocks[blockIndex];
3910 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so candidates can be routed by binary search.
3914 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
3917 for(
size_t allocIndex = 0, allocCount = m_Allocations.size(); allocIndex < allocCount; ++allocIndex)
3919 AllocationInfo& allocInfo = m_Allocations[allocIndex];
3920 VmaBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
3921 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
3922 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
3924 (*it)->m_Allocations.push_back(allocInfo);
// Candidates now live in the per-block lists.
3931 m_Allocations.clear();
3933 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
3935 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
3936 pBlockInfo->CalcHasNonMovableAllocations();
3937 pBlockInfo->SortAllocationsBySizeDescecnding();
// Re-sort blocks by how attractive they are as move destinations.
3941 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
3944 VkResult result = VK_SUCCESS;
3945 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
3947 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Drop any vkMapMemory mappings created solely for this operation.
3951 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
3953 m_Blocks[blockIndex]->Unmap(m_hDevice);
3959 bool VmaDefragmentator::MoveMakesSense(
3960 size_t dstBlockIndex, VkDeviceSize dstOffset,
3961 size_t srcBlockIndex, VkDeviceSize srcOffset)
3963 if(dstBlockIndex < srcBlockIndex)
3967 if(dstBlockIndex > srcBlockIndex)
3971 if(dstOffset < srcOffset)
// --- VmaAllocator_T constructor (its signature line, taking the
// VmaAllocatorCreateInfo*, is elided above this fragment). Copies creation
// parameters, queries device properties, and creates the per-memory-type
// block vectors and own-allocation lists.
3983 m_PhysicalDevice(pCreateInfo->physicalDevice),
3984 m_hDevice(pCreateInfo->device),
3985 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
3986 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
3987 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
3988 m_PreferredLargeHeapBlockSize(0),
3989 m_PreferredSmallHeapBlockSize(0),
3990 m_UnmapPersistentlyMappedMemoryCounter(0)
// Zero all aggregate state before querying the device.
3994 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
3995 memset(&m_MemProps, 0,
sizeof(m_MemProps));
3996 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
3998 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
3999 memset(&m_HasEmptyBlock, 0,
sizeof(m_HasEmptyBlock));
4000 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
4013 vkGetPhysicalDeviceProperties(m_PhysicalDevice, &m_PhysicalDeviceProperties);
4014 vkGetPhysicalDeviceMemoryProperties(m_PhysicalDevice, &m_MemProps);
// One block vector and one own-allocation list per (type, vector type).
4016 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
4018 for(
size_t j = 0; j < VMA_BLOCK_VECTOR_TYPE_COUNT; ++j)
4020 m_pBlockVectors[i][j] = vma_new(
this, VmaBlockVector)(
this);
4021 m_pOwnAllocations[i][j] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destructor: tears down the per-type containers in reverse creation order.
4026 VmaAllocator_T::~VmaAllocator_T()
4028 for(
size_t i = GetMemoryTypeCount(); i--; )
4030 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
4032 vma_delete(
this, m_pOwnAllocations[i][j]);
4033 vma_delete(
this, m_pBlockVectors[i][j]);
// Picks the small- or large-heap block size depending on the size of the
// heap that backs the given memory type.
4038 VkDeviceSize VmaAllocator_T::GetPreferredBlockSize(uint32_t memTypeIndex)
const 4040 VkDeviceSize heapSize = m_MemProps.memoryHeaps[m_MemProps.memoryTypes[memTypeIndex].heapIndex].size;
4041 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
4042 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
4045 VkResult VmaAllocator_T::AllocateMemoryOfType(
4046 const VkMemoryRequirements& vkMemReq,
4048 uint32_t memTypeIndex,
4049 VmaSuballocationType suballocType,
4050 VmaAllocation* pAllocation)
4052 VMA_ASSERT(pAllocation != VMA_NULL);
4053 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
4055 const VkDeviceSize preferredBlockSize = GetPreferredBlockSize(memTypeIndex);
4057 const bool ownMemory =
4059 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
4061 vkMemReq.size > preferredBlockSize / 2);
4067 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4071 return AllocateOwnMemory(
4082 uint32_t blockVectorType = VmaMemoryRequirementFlagsToBlockVectorType(vmaMemReq.
flags);
4084 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4085 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
4086 VMA_ASSERT(blockVector);
4090 for(
size_t allocIndex = 0; allocIndex < blockVector->m_Blocks.size(); ++allocIndex )
4092 VmaBlock*
const pBlock = blockVector->m_Blocks[allocIndex];
4094 VmaAllocationRequest allocRequest = {};
4096 if(pBlock->CreateAllocationRequest(
4097 GetBufferImageGranularity(),
4104 if(pBlock->IsEmpty())
4106 m_HasEmptyBlock[memTypeIndex] =
false;
4109 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size);
4110 *pAllocation = vma_new(
this, VmaAllocation_T)();
4111 (*pAllocation)->InitBlockAllocation(
4113 allocRequest.offset,
4118 VMA_HEAVY_ASSERT(pBlock->Validate());
4119 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)allocIndex);
4127 VMA_DEBUG_LOG(
" FAILED due to VMA_MEMORY_REQUIREMENT_NEVER_ALLOCATE_BIT");
4128 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4133 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
4134 allocInfo.memoryTypeIndex = memTypeIndex;
4135 allocInfo.allocationSize = preferredBlockSize;
4136 VkDeviceMemory mem = VK_NULL_HANDLE;
4137 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
4141 allocInfo.allocationSize /= 2;
4142 if(allocInfo.allocationSize >= vkMemReq.size)
4144 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
4148 allocInfo.allocationSize /= 2;
4149 if(allocInfo.allocationSize >= vkMemReq.size)
4151 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
4159 res = AllocateOwnMemory(
4166 if(res == VK_SUCCESS)
4169 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
4175 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
4183 void* pMappedData = VMA_NULL;
4185 if(persistentMap && m_UnmapPersistentlyMappedMemoryCounter == 0)
4187 res = vkMapMemory(m_hDevice, mem, 0, VK_WHOLE_SIZE, 0, &pMappedData);
4190 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
4191 vkFreeMemory(m_hDevice, mem, GetAllocationCallbacks());
4197 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
4199 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, memTypeIndex, mem, allocInfo.allocationSize);
4203 VmaBlock*
const pBlock = vma_new(
this, VmaBlock)(
this);
4206 (VMA_BLOCK_VECTOR_TYPE)blockVectorType,
4208 allocInfo.allocationSize,
4212 blockVector->m_Blocks.push_back(pBlock);
4215 VmaAllocationRequest allocRequest = {};
4216 allocRequest.freeSuballocationItem = pBlock->m_Suballocations.begin();
4217 allocRequest.offset = 0;
4218 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size);
4219 *pAllocation = vma_new(
this, VmaAllocation_T)();
4220 (*pAllocation)->InitBlockAllocation(
4222 allocRequest.offset,
4227 VMA_HEAVY_ASSERT(pBlock->Validate());
4228 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
4234 VkResult VmaAllocator_T::AllocateOwnMemory(
4236 VmaSuballocationType suballocType,
4237 uint32_t memTypeIndex,
4240 VmaAllocation* pAllocation)
4242 VMA_ASSERT(pAllocation);
4244 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
4245 allocInfo.memoryTypeIndex = memTypeIndex;
4246 allocInfo.allocationSize = size;
4249 VkDeviceMemory hMemory = VK_NULL_HANDLE;
4250 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &hMemory);
4253 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
4257 void* pMappedData =
nullptr;
4260 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
4262 res = vkMapMemory(m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
4265 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
4266 vkFreeMemory(m_hDevice, hMemory, GetAllocationCallbacks());
4273 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
4275 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, memTypeIndex, hMemory, size);
4278 *pAllocation = vma_new(
this, VmaAllocation_T)();
4279 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
4283 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4284 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
4285 VMA_ASSERT(pOwnAllocations);
4286 VmaAllocation*
const pOwnAllocationsBeg = pOwnAllocations->data();
4287 VmaAllocation*
const pOwnAllocationsEnd = pOwnAllocationsBeg + pOwnAllocations->size();
4288 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4292 VmaPointerLess()) - pOwnAllocationsBeg;
4293 VectorInsert(*pOwnAllocations, indexToInsert, *pAllocation);
4296 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
4301 VkResult VmaAllocator_T::AllocateMemory(
4302 const VkMemoryRequirements& vkMemReq,
4304 VmaSuballocationType suballocType,
4305 VmaAllocation* pAllocation)
4310 VMA_ASSERT(0 &&
"Specifying VMA_MEMORY_REQUIREMENT_OWN_MEMORY_BIT together with VMA_MEMORY_REQUIREMENT_NEVER_ALLOCATE_BIT makes no sense.");
4311 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4315 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
4316 uint32_t memTypeIndex = UINT32_MAX;
4318 if(res == VK_SUCCESS)
4320 res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pAllocation);
4322 if(res == VK_SUCCESS)
4332 memoryTypeBits &= ~(1u << memTypeIndex);
4335 if(res == VK_SUCCESS)
4337 res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pAllocation);
4339 if(res == VK_SUCCESS)
4349 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4359 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
4361 VMA_ASSERT(allocation);
4363 if(allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK)
4365 VmaBlock* pBlockToDelete = VMA_NULL;
4367 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
4368 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
4370 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4372 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
4373 VmaBlock* pBlock = allocation->GetBlock();
4375 pBlock->Free(allocation);
4376 VMA_HEAVY_ASSERT(pBlock->Validate());
4378 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
4381 if(pBlock->IsEmpty())
4384 if(m_HasEmptyBlock[memTypeIndex])
4386 pBlockToDelete = pBlock;
4387 pBlockVector->Remove(pBlock);
4392 m_HasEmptyBlock[memTypeIndex] =
true;
4396 pBlockVector->IncrementallySortBlocks();
4400 if(pBlockToDelete != VMA_NULL)
4402 VMA_DEBUG_LOG(
" Deleted empty allocation");
4403 pBlockToDelete->Destroy(
this);
4404 vma_delete(
this, pBlockToDelete);
4407 vma_delete(
this, allocation);
4411 FreeOwnMemory(allocation);
4415 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
4417 InitStatInfo(pStats->
total);
4418 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
4420 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
4423 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
4425 VmaMutexLock allocationsLock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4426 const uint32_t heapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex;
4427 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
4429 const VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
4430 VMA_ASSERT(pBlockVector);
4431 pBlockVector->AddStats(pStats, memTypeIndex, heapIndex);
4435 VmaPostprocessCalcStatInfo(pStats->
total);
4436 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
4437 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
4438 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
4439 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
4442 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
4444 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
4446 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
4448 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
4450 for(
size_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
4452 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
4453 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
4454 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
4458 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4459 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
4460 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
4462 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
4463 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(m_hDevice);
4469 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4470 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
4471 pBlockVector->UnmapPersistentlyMappedMemory();
4479 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
4481 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
4482 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
4484 VkResult finalResult = VK_SUCCESS;
4485 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
4487 for(
size_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
4489 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
4490 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
4491 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
4495 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4496 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
4497 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
4499 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
4500 hAlloc->OwnAllocMapPersistentlyMappedMemory(m_hDevice);
4506 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4507 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
4508 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
4509 if(localResult != VK_SUCCESS)
4511 finalResult = localResult;
4523 VkResult VmaAllocator_T::Defragment(
4524 VmaAllocation* pAllocations,
4525 size_t allocationCount,
4526 VkBool32* pAllocationsChanged,
4530 if(pAllocationsChanged != VMA_NULL)
4532 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
4534 if(pDefragmentationStats != VMA_NULL)
4536 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
4539 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
4541 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
4542 return VK_ERROR_MEMORY_MAP_FAILED;
4546 const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity();
4547 VmaDefragmentator* pDefragmentators[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
4548 memset(pDefragmentators, 0,
sizeof(pDefragmentators));
4549 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
4552 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
4554 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
4556 pDefragmentators[memTypeIndex][blockVectorType] = vma_new(
this, VmaDefragmentator)(
4558 GetAllocationCallbacks(),
4559 bufferImageGranularity,
4561 (VMA_BLOCK_VECTOR_TYPE)blockVectorType);
4567 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
4569 VmaAllocation hAlloc = pAllocations[allocIndex];
4571 if(hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK)
4573 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
4575 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
4577 const VMA_BLOCK_VECTOR_TYPE blockVectorType = hAlloc->GetBlockVectorType();
4578 VkBool32* pChanged = (pAllocationsChanged != VMA_NULL) ?
4579 &pAllocationsChanged[allocIndex] : VMA_NULL;
4580 pDefragmentators[memTypeIndex][blockVectorType]->AddAllocation(hAlloc, pChanged);
4587 VkResult result = VK_SUCCESS;
4590 VkDeviceSize maxBytesToMove = SIZE_MAX;
4591 uint32_t maxAllocationsToMove = UINT32_MAX;
4592 if(pDefragmentationInfo != VMA_NULL)
4597 for(uint32_t memTypeIndex = 0;
4598 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
4602 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
4604 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4606 for(uint32_t blockVectorType = 0;
4607 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
4610 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
4613 result = pDefragmentators[memTypeIndex][blockVectorType]->Defragment(pBlockVector, maxBytesToMove, maxAllocationsToMove);
4616 if(pDefragmentationStats != VMA_NULL)
4618 const VkDeviceSize
bytesMoved = pDefragmentators[memTypeIndex][blockVectorType]->GetBytesMoved();
4619 const uint32_t
allocationsMoved = pDefragmentators[memTypeIndex][blockVectorType]->GetAllocationsMoved();
4622 VMA_ASSERT(bytesMoved <= maxBytesToMove);
4623 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
4629 for(
size_t blockIndex = pBlockVector->m_Blocks.size(); blockIndex--; )
4631 VmaBlock* pBlock = pBlockVector->m_Blocks[blockIndex];
4632 if(pBlock->IsEmpty())
4634 if(pDefragmentationStats != VMA_NULL)
4637 pDefragmentationStats->
bytesFreed += pBlock->m_Size;
4640 VectorRemove(pBlockVector->m_Blocks, blockIndex);
4641 pBlock->Destroy(
this);
4642 vma_delete(
this, pBlock);
4647 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_COUNT - 1)
4649 m_HasEmptyBlock[memTypeIndex] =
false;
4656 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
4658 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
4660 vma_delete(
this, pDefragmentators[memTypeIndex][blockVectorType]);
4667 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
4669 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
4670 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
4671 pAllocationInfo->
offset = hAllocation->GetOffset();
4672 pAllocationInfo->
size = hAllocation->GetSize();
4673 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
4674 pAllocationInfo->
pUserData = hAllocation->GetUserData();
4677 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
4679 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
4681 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
4683 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4684 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
4685 VMA_ASSERT(pOwnAllocations);
4686 VmaAllocation*
const pOwnAllocationsBeg = pOwnAllocations->data();
4687 VmaAllocation*
const pOwnAllocationsEnd = pOwnAllocationsBeg + pOwnAllocations->size();
4688 VmaAllocation*
const pOwnAllocationIt = VmaBinaryFindFirstNotLess(
4693 if(pOwnAllocationIt != pOwnAllocationsEnd)
4695 const size_t ownAllocationIndex = pOwnAllocationIt - pOwnAllocationsBeg;
4696 VectorRemove(*pOwnAllocations, ownAllocationIndex);
4704 VkDeviceMemory hMemory = allocation->GetMemory();
4707 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
4709 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memTypeIndex, hMemory, allocation->GetSize());
4712 if(allocation->GetMappedData() != VMA_NULL)
4714 vkUnmapMemory(m_hDevice, hMemory);
4717 vkFreeMemory(m_hDevice, hMemory, GetAllocationCallbacks());
4719 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
4721 vma_delete(
this, allocation);
4724 #if VMA_STATS_STRING_ENABLED 4726 void VmaAllocator_T::PrintDetailedMap(VmaStringBuilder& sb)
4728 bool ownAllocationsStarted =
false;
4729 for(
size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
4731 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4732 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
4734 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
4735 VMA_ASSERT(pOwnAllocVector);
4736 if(pOwnAllocVector->empty() ==
false)
4738 if(ownAllocationsStarted)
4740 sb.Add(
",\n\t\"Type ");
4744 sb.Add(
",\n\"OwnAllocations\": {\n\t\"Type ");
4745 ownAllocationsStarted =
true;
4747 sb.AddNumber(memTypeIndex);
4748 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
4754 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
4756 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
4759 sb.Add(
",\n\t\t{ \"Size\": ");
4763 sb.Add(
"\n\t\t{ \"Size\": ");
4765 sb.AddNumber(hAlloc->GetSize());
4766 sb.Add(
", \"Type\": ");
4767 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
4775 if(ownAllocationsStarted)
4781 bool allocationsStarted =
false;
4782 for(
size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
4784 VmaMutexLock globalAllocationsLock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4785 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
4787 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
4789 if(allocationsStarted)
4791 sb.Add(
",\n\t\"Type ");
4795 sb.Add(
",\n\"Allocations\": {\n\t\"Type ");
4796 allocationsStarted =
true;
4798 sb.AddNumber(memTypeIndex);
4799 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
4805 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(sb);
4811 if(allocationsStarted)
4818 #endif // #if VMA_STATS_STRING_ENABLED 4820 static VkResult AllocateMemoryForImage(
4821 VmaAllocator allocator,
4824 VmaSuballocationType suballocType,
4825 VmaAllocation* pAllocation)
4827 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pMemoryRequirements && pAllocation);
4829 VkMemoryRequirements vkMemReq = {};
4830 vkGetImageMemoryRequirements(allocator->m_hDevice, image, &vkMemReq);
4832 return allocator->AllocateMemory(
4834 *pMemoryRequirements,
4844 VmaAllocator* pAllocator)
4846 VMA_ASSERT(pCreateInfo && pAllocator);
4847 VMA_DEBUG_LOG(
"vmaCreateAllocator");
4853 VmaAllocator allocator)
4855 if(allocator != VK_NULL_HANDLE)
4857 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
4858 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
4859 vma_delete(&allocationCallbacks, allocator);
4864 VmaAllocator allocator,
4865 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
4867 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
4868 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
4872 VmaAllocator allocator,
4873 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
4875 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
4876 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
4880 VmaAllocator allocator,
4881 uint32_t memoryTypeIndex,
4882 VkMemoryPropertyFlags* pFlags)
4884 VMA_ASSERT(allocator && pFlags);
4885 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
4886 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
4890 VmaAllocator allocator,
4893 VMA_ASSERT(allocator && pStats);
4894 VMA_DEBUG_GLOBAL_MUTEX_LOCK
4895 allocator->CalculateStats(pStats);
4898 #if VMA_STATS_STRING_ENABLED 4901 VmaAllocator allocator,
4902 char** ppStatsString,
4903 VkBool32 detailedMap)
4905 VMA_ASSERT(allocator && ppStatsString);
4906 VMA_DEBUG_GLOBAL_MUTEX_LOCK
4908 VmaStringBuilder sb(allocator);
4911 allocator->CalculateStats(&stats);
4913 sb.Add(
"{\n\"Total\": ");
4914 VmaPrintStatInfo(sb, stats.
total);
4916 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
4918 sb.Add(
",\n\"Heap ");
4919 sb.AddNumber(heapIndex);
4920 sb.Add(
"\": {\n\t\"Size\": ");
4921 sb.AddNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
4922 sb.Add(
",\n\t\"Flags\": ");
4923 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
4925 sb.AddString(
"DEVICE_LOCAL");
4933 sb.Add(
",\n\t\"Stats:\": ");
4934 VmaPrintStatInfo(sb, stats.
memoryHeap[heapIndex]);
4937 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
4939 if(allocator->m_MemProps.memoryTypes[typeIndex].heapIndex == heapIndex)
4941 sb.Add(
",\n\t\"Type ");
4942 sb.AddNumber(typeIndex);
4943 sb.Add(
"\": {\n\t\t\"Flags\": \"");
4944 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
4945 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
4947 sb.Add(
" DEVICE_LOCAL");
4949 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
4951 sb.Add(
" HOST_VISIBLE");
4953 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
4955 sb.Add(
" HOST_COHERENT");
4957 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
4959 sb.Add(
" HOST_CACHED");
4961 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
4963 sb.Add(
" LAZILY_ALLOCATED");
4968 sb.Add(
",\n\t\t\"Stats\": ");
4969 VmaPrintStatInfo(sb, stats.
memoryType[typeIndex]);
4976 if(detailedMap == VK_TRUE)
4978 allocator->PrintDetailedMap(sb);
4983 const size_t len = sb.GetLength();
4984 char*
const pChars = vma_new_array(allocator,
char, len + 1);
4987 memcpy(pChars, sb.GetData(), len);
4990 *ppStatsString = pChars;
4994 VmaAllocator allocator,
4997 if(pStatsString != VMA_NULL)
4999 VMA_ASSERT(allocator);
5000 size_t len = strlen(pStatsString);
5001 vma_delete_array(allocator, pStatsString, len + 1);
5005 #endif // #if VMA_STATS_STRING_ENABLED 5010 VmaAllocator allocator,
5011 uint32_t memoryTypeBits,
5013 uint32_t* pMemoryTypeIndex)
5015 VMA_ASSERT(allocator != VK_NULL_HANDLE);
5016 VMA_ASSERT(pMemoryRequirements != VMA_NULL);
5017 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
5019 uint32_t requiredFlags = pMemoryRequirements->
requiredFlags;
5021 if(preferredFlags == 0)
5023 preferredFlags = requiredFlags;
5026 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
5029 switch(pMemoryRequirements->
usage)
5034 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
5037 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
5040 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5041 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
5044 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5045 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
5053 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5056 *pMemoryTypeIndex = UINT32_MAX;
5057 uint32_t minCost = UINT32_MAX;
5058 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
5059 memTypeIndex < allocator->GetMemoryTypeCount();
5060 ++memTypeIndex, memTypeBit <<= 1)
5063 if((memTypeBit & memoryTypeBits) != 0)
5065 const VkMemoryPropertyFlags currFlags =
5066 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
5068 if((requiredFlags & ~currFlags) == 0)
5071 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
5073 if(currCost < minCost)
5075 *pMemoryTypeIndex = memTypeIndex;
5085 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
5089 VmaAllocator allocator,
5090 const VkMemoryRequirements* pVkMemoryRequirements,
5092 VmaAllocation* pAllocation,
5095 VMA_ASSERT(allocator && pVkMemoryRequirements && pVmaMemoryRequirements && pAllocation);
5097 VMA_DEBUG_LOG(
"vmaAllocateMemory");
5099 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5101 return allocator->AllocateMemory(
5102 *pVkMemoryRequirements,
5103 *pVmaMemoryRequirements,
5104 VMA_SUBALLOCATION_TYPE_UNKNOWN,
5109 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5114 VmaAllocator allocator,
5117 VmaAllocation* pAllocation,
5120 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pMemoryRequirements && pAllocation);
5122 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
5124 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5126 VkMemoryRequirements vkMemReq = {};
5127 vkGetBufferMemoryRequirements(allocator->m_hDevice, buffer, &vkMemReq);
5129 return allocator->AllocateMemory(
5131 *pMemoryRequirements,
5132 VMA_SUBALLOCATION_TYPE_BUFFER,
5137 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5142 VmaAllocator allocator,
5145 VmaAllocation* pAllocation,
5148 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements && pAllocation);
5150 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
5152 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5154 return AllocateMemoryForImage(
5157 pMemoryRequirements,
5158 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
5163 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5168 VmaAllocator allocator,
5169 VmaAllocation allocation)
5171 VMA_ASSERT(allocator && allocation);
5173 VMA_DEBUG_LOG(
"vmaFreeMemory");
5175 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5177 allocator->FreeMemory(allocation);
5181 VmaAllocator allocator,
5182 VmaAllocation allocation,
5185 VMA_ASSERT(allocator && allocation && pAllocationInfo);
5187 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5189 allocator->GetAllocationInfo(allocation, pAllocationInfo);
5193 VmaAllocator allocator,
5194 VmaAllocation allocation,
5197 VMA_ASSERT(allocator && allocation);
5199 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5201 allocation->SetUserData(pUserData);
5205 VmaAllocator allocator,
5206 VmaAllocation allocation,
5209 VMA_ASSERT(allocator && allocation && ppData);
5211 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5213 return vkMapMemory(allocator->m_hDevice, allocation->GetMemory(),
5214 allocation->GetOffset(), allocation->GetSize(), 0, ppData);
5218 VmaAllocator allocator,
5219 VmaAllocation allocation)
5221 VMA_ASSERT(allocator && allocation);
5223 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5225 vkUnmapMemory(allocator->m_hDevice, allocation->GetMemory());
5230 VMA_ASSERT(allocator);
5232 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5234 allocator->UnmapPersistentlyMappedMemory();
5239 VMA_ASSERT(allocator);
5241 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5243 return allocator->MapPersistentlyMappedMemory();
5247 VmaAllocator allocator,
5248 VmaAllocation* pAllocations,
5249 size_t allocationCount,
5250 VkBool32* pAllocationsChanged,
5254 VMA_ASSERT(allocator && pAllocations);
5256 VMA_DEBUG_LOG(
"vmaDefragment");
5258 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5260 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
5264 VmaAllocator allocator,
5265 const VkBufferCreateInfo* pCreateInfo,
5268 VmaAllocation* pAllocation,
5271 VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements && pBuffer && pAllocation);
5273 VMA_DEBUG_LOG(
"vmaCreateBuffer");
5275 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5278 VkResult res = vkCreateBuffer(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pBuffer);
5282 VkMemoryRequirements vkMemReq = {};
5283 vkGetBufferMemoryRequirements(allocator->m_hDevice, *pBuffer, &vkMemReq);
5286 res = allocator->AllocateMemory(
5288 *pMemoryRequirements,
5289 VMA_SUBALLOCATION_TYPE_BUFFER,
5294 res = vkBindBufferMemory(allocator->m_hDevice, *pBuffer, (*pAllocation)->GetMemory(), (*pAllocation)->GetOffset());
5298 if(pAllocationInfo != VMA_NULL)
5300 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5304 allocator->FreeMemory(*pAllocation);
5307 vkDestroyBuffer(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
5314 VmaAllocator allocator,
5316 VmaAllocation allocation)
5318 if(buffer != VK_NULL_HANDLE)
5320 VMA_ASSERT(allocator);
5322 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
5324 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5326 vkDestroyBuffer(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
5328 allocator->FreeMemory(allocation);
5333 VmaAllocator allocator,
5334 const VkImageCreateInfo* pCreateInfo,
5337 VmaAllocation* pAllocation,
5340 VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements && pImage && pAllocation);
5342 VMA_DEBUG_LOG(
"vmaCreateImage");
5344 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5347 VkResult res = vkCreateImage(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pImage);
5350 VkMappedMemoryRange mem = {};
5351 VmaSuballocationType suballocType = pCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
5352 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
5353 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
5356 res = AllocateMemoryForImage(allocator, *pImage, pMemoryRequirements, suballocType, pAllocation);
5360 res = vkBindImageMemory(allocator->m_hDevice, *pImage, (*pAllocation)->GetMemory(), (*pAllocation)->GetOffset());
5364 if(pAllocationInfo != VMA_NULL)
5366 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5370 allocator->FreeMemory(*pAllocation);
5373 vkDestroyImage(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
5380 VmaAllocator allocator,
5382 VmaAllocation allocation)
5384 if(image != VK_NULL_HANDLE)
5386 VMA_ASSERT(allocator);
5388 VMA_DEBUG_LOG(
"vmaDestroyImage");
5390 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5392 vkDestroyImage(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
5394 allocator->FreeMemory(allocation);
5398 #endif // #ifdef VMA_IMPLEMENTATION VmaMemoryRequirementFlagBits
Flags to be passed as VmaMemoryRequirements::flags.
Definition: vk_mem_alloc.h:336
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:345
+
struct VmaMemoryRequirements VmaMemoryRequirements
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:214
+
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pCreateInfo, const VmaMemoryRequirements *pMemoryRequirements, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
+
Memory will be used for writing on device and readback on host.
Definition: vk_mem_alloc.h:331
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:374
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:431
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to a different place.
Definition: vk_mem_alloc.h:561
+
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
+
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
+
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
+
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:177
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom allocation callbacks.
Definition: vk_mem_alloc.h:226
+
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaMemoryRequirements *pMemoryRequirements, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:208
+
VkDeviceSize preferredSmallHeapBlockSize
Size of a single memory block to allocate for resources from a small heap <= 512 MB.
Definition: vk_mem_alloc.h:223
+
VmaMemoryRequirementFlags flags
Definition: vk_mem_alloc.h:369
+
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:205
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:565
+
VmaStatInfo total
Definition: vk_mem_alloc.h:284
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:573
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:556
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:217
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:441
+
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:354
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer)
-
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pCreateInfo, const VmaMemoryRequirements *pMemoryRequirements, VkImage *pImage, VkMappedMemoryRange *pMemory, uint32_t *pMemoryTypeIndex)
Function similar to vmaCreateBuffer().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:232
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:551
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:569
+
Definition: vk_mem_alloc.h:363
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:385
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:280
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:310
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:271
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:383
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:571
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:317
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pCreateInfo, const VmaMemoryRequirements *pMemoryRequirements, VkBuffer *pBuffer, VkMappedMemoryRange *pMemory, uint32_t *pMemoryTypeIndex)
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:201
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
Definition: vk_mem_alloc.h:220
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:305
-
Definition: vk_mem_alloc.h:286
-
VkBool32 neverAllocate
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:317
-
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:228
-
VkDeviceSize SuballocationSizeMax
Definition: vk_mem_alloc.h:227
+
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:196
+
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
+
Definition: vk_mem_alloc.h:268
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:188
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:378
+
Definition: vk_mem_alloc.h:367
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:192
+
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that is HOST_COHERENT and DEVICE_LOCAL.
+
VkFlags VmaMemoryRequirementFlags
Definition: vk_mem_alloc.h:365
+
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:276
+
VkDeviceSize SuballocationSizeMax
Definition: vk_mem_alloc.h:275
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
VkBool32 ownMemory
Set to true if this allocation should have its own memory block.
Definition: vk_mem_alloc.h:296
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:234
-
void vmaDestroyImage(VmaAllocator allocator, VkImage image)
-
uint32_t AllocationCount
Definition: vk_mem_alloc.h:222
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:171
+
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaMemoryRequirements *pVmaMemoryRequirements, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:229
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:282
+
uint32_t AllocationCount
Definition: vk_mem_alloc.h:270
+
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:190
+
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:211
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
VkDeviceSize UsedBytes
Definition: vk_mem_alloc.h:225
-
VkDeviceSize preferredLargeHeapBlockSize
Size of a single memory block to allocate for resources.
Definition: vk_mem_alloc.h:175
-
uint32_t UnusedRangeCount
Definition: vk_mem_alloc.h:224
-
Memory will be mapped on host. Could be used for transfer to device.
Definition: vk_mem_alloc.h:278
+
VkDeviceSize UsedBytes
Definition: vk_mem_alloc.h:273
+
void * pUserData
Custom general-purpose pointer that was passed as VmaMemoryRequirements::pUserData or set using vmaSe...
Definition: vk_mem_alloc.h:452
+
VkDeviceSize preferredLargeHeapBlockSize
Size of a single memory block to allocate for resources.
Definition: vk_mem_alloc.h:220
+
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaMemoryRequirements *pMemoryRequirements, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
+
uint32_t UnusedRangeCount
Definition: vk_mem_alloc.h:272
+
Memory will be mapped on host. Could be used for transfer to device.
Definition: vk_mem_alloc.h:325
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
uint32_t SuballocationCount
Definition: vk_mem_alloc.h:223
-
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:228
-
VkDeviceSize SuballocationSizeMin
Definition: vk_mem_alloc.h:227
-
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaMemoryRequirements *pMemoryRequirements, VkMappedMemoryRange *pMemory, uint32_t *pMemoryTypeIndex)
-
VkDeviceSize SuballocationSizeAvg
Definition: vk_mem_alloc.h:227
+
uint32_t SuballocationCount
Definition: vk_mem_alloc.h:271
+
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:276
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:436
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:567
+
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
+
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
+
VkDeviceSize SuballocationSizeMin
Definition: vk_mem_alloc.h:275
+
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pCreateInfo, const VmaMemoryRequirements *pMemoryRequirements, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this alloaction is not persistent...
Definition: vk_mem_alloc.h:447
+
VkDeviceSize SuballocationSizeAvg
Definition: vk_mem_alloc.h:275
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
-
No intended memory usage specified.
Definition: vk_mem_alloc.h:274
-
Definition: vk_mem_alloc.h:283
-
Memory will be used for frequent (dynamic) updates from host and reads on device. ...
Definition: vk_mem_alloc.h:280
+
Set to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:361
+
No intended memory usage specified.
Definition: vk_mem_alloc.h:320
+
Definition: vk_mem_alloc.h:332
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:419
+
Memory will be used for frequent (dynamic) updates from host and reads on device. ...
Definition: vk_mem_alloc.h:328
+
Definition: vk_mem_alloc.h:203
+
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
Memory will be used on device only, no need to be mapped on host.
Definition: vk_mem_alloc.h:276
+
Memory will be used on device only, no need to be mapped on host.
Definition: vk_mem_alloc.h:322
struct VmaStatInfo VmaStatInfo
-
VkDeviceSize UnusedBytes
Definition: vk_mem_alloc.h:226
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:235
+
VkDeviceSize UnusedBytes
Definition: vk_mem_alloc.h:274
+
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:283
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaMemoryRequirements *pMemoryRequirements, uint32_t *pMemoryTypeIndex)
-
void vmaFreeMemory(VmaAllocator allocator, const VkMappedMemoryRange *pMemory)
Frees memory previously allocated using vmaAllocateMemoryForBuffer() or vmaAllocateMemoryForImage().
-
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:228
+
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
+
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
+
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:276
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:424