#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#include <vulkan/vulkan.h>

VK_DEFINE_HANDLE(VmaAllocator)

/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);

/// Creates Allocator object.
VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator);

/// Destroys allocator object.
void vmaDestroyAllocator(
    VmaAllocator allocator);

void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

/// Given memory type index, returns its VkMemoryPropertyFlags.
void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

/// Sets index of the current frame.
void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);

/// Retrieves statistics from current state of the allocator.
void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats);

#define VMA_STATS_STRING_ENABLED 1

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as string in JSON format.
void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED

VK_DEFINE_HANDLE(VmaPool)
VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VkResult vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool);

void vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool);

void vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats);

void vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);
VK_DEFINE_HANDLE(VmaAllocation)

VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

void vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation);

void vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo);

VkBool32 vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation);

void vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData);

void vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation);

VkResult vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData);

void vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation);

VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

void vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation);

VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

void vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation);
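/*
Illustrative usage sketch (not part of the original header): creating an allocator and a
GPU-only buffer through the functions declared above. `physicalDevice` and `device` are
assumed to have been created by the application beforehand.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;

    VmaAllocator allocator;
    vmaCreateAllocator(&allocatorInfo, &allocator);

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 65536;
    bufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer;
    VmaAllocation allocation;
    vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, nullptr);

    // ... use the buffer ...

    vmaDestroyBuffer(allocator, buffer, allocation);
    vmaDestroyAllocator(allocator);
*/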
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#ifdef __INTELLISENSE__
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
#define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cstdlib>
#include <cstring>
#include <atomic>
#include <mutex>
#include <algorithm>

#if !defined(_WIN32) && !defined(__APPLE__)
    #include <malloc.h> // for aligned_alloc()
#endif

#define VMA_NULL nullptr

#if defined(__APPLE__) || defined(__ANDROID__)
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr) assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif

// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif

#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        VmaMutex() { }
        ~VmaMutex() { }
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

#ifndef VMA_ATOMIC_UINT32
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_BEST_FIT
    // Use best-fit instead of first-fit strategy when searching free suballocations.
    #define VMA_BEST_FIT (1)
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation gets its own, dedicated VkDeviceMemory (for debugging).
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all suballocations, in bytes.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin between suballocations, in bytes.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Enable single mutex protecting all entry calls to the library (for debugging).
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity (for debugging).
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) // 1 GiB
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) // 256 MiB
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };

// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}

// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
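// Illustrative examples (not part of the original file):
//   VmaAlignUp<VkDeviceSize>(13, 8) == 16   // round 13 up to a multiple of 8
//   VmaAlignUp<VkDeviceSize>(16, 8) == 16
//   VmaRoundDiv<VkDeviceSize>(7, 2) == 4    // division with rounding to nearest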
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT

/*
Returns true if two memory blocks occupy very close parts of the same memory page,
so they must respect the bufferImageGranularity requirement between each other.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
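// Illustrative example (not part of the original file), with pageSize = 4096:
//   VmaBlocksOnSamePage(0, 4001, 4001, 4096) == true   // both resources touch page [0, 4095]
//   VmaBlocksOnSamePage(0, 4001, 4096, 4096) == false  // second resource starts on the next page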
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};

// Returns true if given suballocation types could conflict and must respect
// VkPhysicalDeviceLimits::bufferImageGranularity when placed next to each other.
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    {
        if(m_pMutex) { m_pMutex->Lock(); }
    }
    ~VmaMutexLock()
    {
        if(m_pMutex) { m_pMutex->Unlock(); }
    }
private:
    VMA_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the free suballocation collection.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to the first element that is not less
than (key), according to comparison (cmp). Cmp should return true if the first
argument is less than the second argument.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
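// Illustrative example (not part of the original file): lower-bound search over a sorted array.
//   const int arr[] = { 1, 3, 3, 7 };
//   const int* it = VmaBinaryFindFirstNotLess(arr, arr + 4, 3, [](int a, int b) { return a < b; });
//   // it points to the first 3 (index 1); searching for 4 would return the 7 (index 3).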
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}

template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
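// Illustrative example (not part of the original file): vma_new/vma_delete pair placement-new
// construction with allocation through the user-provided VkAllocationCallbacks (or the system
// aligned allocator when no callbacks are given). `pAllocationCallbacks` is assumed.
//   VmaMutex* pMutex = vma_new(pAllocationCallbacks, VmaMutex)();
//   vma_delete(pAllocationCallbacks, pMutex);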
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
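// Illustrative example (not part of the original file): VmaStlAllocator routes container
// allocations through the same VkAllocationCallbacks as the rest of the library. It is used
// below with VmaVector, VmaList and VmaMap; with the STL variants it plugs into std containers:
//   std::vector< uint32_t, VmaStlAllocator<uint32_t> > v( VmaStlAllocator<uint32_t>(pAllocationCallbacks) );
//   v.push_back(42);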
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with a subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is used for copying. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index) { VMA_HEAVY_ASSERT(index < m_Count); return m_pArray[index]; }
    const T& operator[](size_t index) const { VMA_HEAVY_ASSERT(index < m_Count); return m_pArray[index]; }

    T& front() { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[0]; }
    const T& front() const { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[0]; }
    T& back() { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[m_Count - 1]; }
    const T& back() const { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[m_Count - 1]; }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false) { resize(0, freeMemory); }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back() { VMA_HEAVY_ASSERT(m_Count > 0); resize(size() - 1); }
    void push_front(const T& src) { insert(0, src); }
    void pop_front() { VMA_HEAVY_ASSERT(m_Count > 0); remove(0); }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}
#endif // #if VMA_USE_STL_VECTOR

template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}

template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* const pBeg = vector.data();
    const typename VectorT::value_type* const pEnd = pBeg + vector.size();
    const typename VectorT::value_type* const it = VmaBinaryFindFirstNotLess(pBeg, pEnd, value, comparator);
    if((it != pEnd) && !comparator(*it, value) && !comparator(value, *it))
    {
        return it - pBeg;
    }
    return vector.size();
}
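// Illustrative example (not part of the original file): keeping a VmaVector sorted with the
// helpers above. CmpLess is any strict-weak-ordering functor; `vec` is assumed.
//   struct UintLess { bool operator()(uint32_t a, uint32_t b) const { return a < b; } };
//   VmaVectorInsertSorted<UintLess>(vec, 7);   // inserts at the lower-bound position
//   VmaVectorRemoveSorted<UintLess>(vec, 7);   // binary-searches and removes if found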
/*
Allocator for objects of type T using a list of arrays (pools) to speed up allocation.
Number of elements that can be allocated is not bounded because the allocator can create
multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    void Clear();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}

template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: use the first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has a free item: create a new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in the address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
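// Illustrative example (not part of the original file): VmaPoolAllocator hands out fixed-size
// slots from growing blocks; note that Alloc()/Free() do not construct or destroy T.
//   VmaPoolAllocator<uint64_t> pool(pAllocationCallbacks, 128);
//   uint64_t* p = pool.Alloc();
//   pool.Free(p);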
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;

    // Declared not defined, to block copy constructor and assignment operator.
    VmaRawList(const VmaRawList<T>& src);
    VmaRawList<T>& operator=(const VmaRawList<T>& rhs);
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
public:
    class iterator
    {
    public:
        T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; }
        T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; }

        iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext; return *this; }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int) { iterator result = *this; ++*this; return result; }
        iterator operator--(int) { iterator result = *this; --*this; return result; }

        bool operator==(const iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; }
        bool operator!=(const iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; }
        const T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; }

        const_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext; return *this; }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int) { const_iterator result = *this; ++*this; return result; }
        const_iterator operator--(int) { const_iterator result = *this; --*this; return result; }

        bool operator==(const const_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; }
        bool operator!=(const const_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
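// Illustrative example (not part of the original file): VmaList mirrors a small subset of
// std::list, backed by VmaRawList and the pool allocator above. `pAllocationCallbacks` is assumed.
//   VmaList< int, VmaStlAllocator<int> > list( VmaStlAllocator<int>(pAllocationCallbacks) );
//   list.push_back(1);
//   for(VmaList< int, VmaStlAllocator<int> >::iterator it = list.begin(); it != list.end(); ++it) { /* ... */ }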
#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with a subset of the interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector. */
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
// Forward declaration.
class VmaDeviceMemoryBlock;

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment(1),
        m_Size(0),
        m_pUserData(VMA_NULL),
        m_LastUseFrameIndex(currentFrameIndex),
        m_Type((uint8_t)ALLOCATION_TYPE_NONE),
        m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
        m_MapCount(0),
        m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
    {
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaPool hPool,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_hPool = hPool;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const;
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;
    VmaPool GetPool() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }

    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaPool m_hPool; // Null if belongs to general memory.
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        uint32_t m_MemoryTypeIndex;
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

    void FreeUserDataString(VmaAllocator hAllocator);
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

// Parameters of a planned allocation inside a VmaDeviceMemoryBlock.
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};

/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    ~VmaBlockMetadata();
    void Init(VkDeviceSize size);

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;
    VkDeviceSize GetSize() const { return m_Size; }
    size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    VkDeviceSize GetUnusedRangeSizeMax() const;
    // Returns true if this block is empty - contains only a single free suballocation.
    bool IsEmpty() const;

    void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    // Creates trivial request for case when block is empty.
    void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true. If failed, returns false.
    bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        VmaAllocationRequest* pAllocationRequest);

    bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    // Makes actual allocation based on request. Request must already be checked and valid.
    void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    // Frees suballocation assigned to given memory region.
    void Free(const VmaAllocation allocation);
    void FreeAtOffset(VkDeviceSize offset);

private:
    VkDeviceSize m_Size;
    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given a free suballocation, it merges it with the following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free, and merges it with adjacent free suballocations.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Inserts given free suballocation into m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Removes given free suballocation from m_FreeSuballocationsBySize if it's suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
// Helper class that keeps track of mapping of a single VkDeviceMemory with reference counting.
class VmaDeviceMemoryMapping
{
public:
    VmaDeviceMemoryMapping();
    ~VmaDeviceMemoryMapping();

    void* GetMappedData() const { return m_pMappedData; }

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count, void **ppData);
    void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);

private:
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};

/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, VmaAllocation), assigned and free.
Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
public:
    uint32_t m_MemoryTypeIndex;
    VkDeviceMemory m_hMemory;
    VmaDeviceMemoryMapping m_Mapping;
    VmaBlockMetadata m_Metadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);
};

struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
Vulkan memory type. Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VmaBlockVector(
        VmaAllocator hAllocator,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool isCustomPool);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }

    bool IsEmpty() const { return m_Blocks.empty(); }

    VkResult Allocate(
        VmaPool hCurrentPool,
        uint32_t currentFrameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    void Free(
        VmaAllocation hAllocation);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);

    VmaDefragmentator* EnsureDefragmentator(
        VmaAllocator hAllocator,
        uint32_t currentFrameIndex);

    VkResult Defragment(
        VmaDefragmentationStats* pDefragmentationStats,
        VkDeviceSize& maxBytesToMove,
        uint32_t& maxAllocationsToMove);

    void DestroyDefragmentator();

private:
    friend class VmaDefragmentator;

    const VmaAllocator m_hAllocator;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_IsCustomPool;
    VMA_MUTEX m_Mutex;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    /* There can be at most one allocation that is completely empty - a
    hysteresis to avoid pessimistic case of alternating creation and destruction
    of a VkDeviceMemory. */
    bool m_HasEmptyBlock;
    VmaDefragmentator* m_pDefragmentator;

    size_t CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted after this call.
    void IncrementallySortBlocks();

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
};

struct VmaPool_T
{
public:
    VmaBlockVector m_BlockVector;

    // Takes ownership.
    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo);
    ~VmaPool_T();

    VmaBlockVector& GetBlockVector() { return m_BlockVector; }

#if VMA_STATS_STRING_ENABLED
    //void PrintDetailedMap(class VmaStringBuilder& sb);
#endif
};
class VmaDefragmentator
{
    const VmaAllocator m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    uint32_t m_CurrentFrameIndex;
    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
    };

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    // Used between AddAllocation and Defragment.
    VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

    struct BlockInfo
    {
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks),
            m_pMappedDataForDefragmentation(VMA_NULL)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescecnding()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        VkResult EnsureMapping(VmaAllocator hAllocator, void** ppMappedData);
        void Unmap(VmaAllocator hAllocator);

    private:
        // Not null if mapped for defragmentation only, not originally mapped.
        void* m_pMappedDataForDefragmentation;
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);

public:
    VmaDefragmentator(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex);

    ~VmaDefragmentator();

    VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);

    VkResult Defragment(
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);
};
// Main allocator object.
struct VmaAllocator_T
{
    bool m_UseMutex;
    bool m_UseKhrDedicatedAllocation;
    VkDevice m_hDevice;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;

    // Number of bytes free out of limit, or VK_WHOLE_SIZE if no limit for that heap.
    VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    VMA_MUTEX m_HeapSizeLimitMutex;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Main deallocation function.
    void FreeMemory(const VmaAllocation allocation);

    void CalculateStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult Defragment(
        VmaAllocation* pAllocations,
        size_t allocationCount,
        VkBool32* pAllocationsChanged,
        const VmaDefragmentationInfo* pDefragmentationInfo,
        VmaDefragmentationStats* pDefragmentationStats);

    void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
    bool TouchAllocation(VmaAllocation hAllocation);

    VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    void DestroyPool(VmaPool pool);
    void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);

    void SetCurrentFrameIndex(uint32_t frameIndex);

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);

    void CreateLostAllocation(VmaAllocation* pAllocation);

    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult Map(VmaAllocation hAllocation, void** ppData);
    void Unmap(VmaAllocation hAllocation);

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;

    VMA_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;

    VmaVulkanFunctions m_VulkanFunctions;

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        const VkMemoryRequirements& vkMemReq,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for a single allocation.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        VmaAllocation* pAllocation);

    void FreeDedicatedMemory(VmaAllocation allocation);
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
            ptr[i].~T();
        VmaFree(hAllocator, ptr);
    }
}
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    VmaUint32ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    VmaUint64ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}
#endif // #if VMA_STATS_STRING_ENABLED

#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);
    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);
    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        // Escape characters that JSON requires to be escaped; other control
        // characters are not currently supported.
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n) { VMA_ASSERT(m_InsideString); m_SB.AddNumber(n); }

void VmaJsonWriter::ContinueString(uint64_t n) { VMA_ASSERT(m_InsideString); m_SB.AddNumber(n); }

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n) { VMA_ASSERT(!m_InsideString); BeginValue(false); m_SB.AddNumber(n); }

void VmaJsonWriter::WriteNumber(uint64_t n) { VMA_ASSERT(!m_InsideString); BeginValue(false); m_SB.AddNumber(n); }

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}
4702 #endif // #if VMA_STATS_STRING_ENABLED 4706 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4708 if(IsUserDataString())
4710 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4712 FreeUserDataString(hAllocator);
4714 if(pUserData != VMA_NULL)
4716 const char*
const newStrSrc = (
char*)pUserData;
4717 const size_t newStrLen = strlen(newStrSrc);
4718 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4719 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4720 m_pUserData = newStrDst;
4725 m_pUserData = pUserData;
4729 void VmaAllocation_T::ChangeBlockAllocation(
4730 VmaAllocator hAllocator,
4731 VmaDeviceMemoryBlock* block,
4732 VkDeviceSize offset)
4734 VMA_ASSERT(block != VMA_NULL);
4735 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4738 if(block != m_BlockAllocation.m_Block)
4740 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4741 if(IsPersistentMap())
4743 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4744 block->Map(hAllocator, mapRefCount, VMA_NULL);
4747 m_BlockAllocation.m_Block = block;
4748 m_BlockAllocation.m_Offset = offset;
VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        // ...
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->m_hMemory;
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        // ...
        return VK_NULL_HANDLE;
    }
}

uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_MemoryTypeIndex;
    // ...
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        // ...
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    // ...
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        // ...
    }
}

VmaPool VmaAllocation_T::GetPool() const
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    return m_BlockAllocation.m_hPool;
}
bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    // ...
    if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
    {
        // ...
    }
    else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
    {
        // ...
    }
    else
    {
        if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
        {
            // ...
        }
        // ...
    }
}

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    if(m_pUserData != VMA_NULL)
    {
        char* const oldStr = (char*)m_pUserData;
        const size_t oldStrLen = strlen(oldStr);
        vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
        m_pUserData = VMA_NULL;
    }
}
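// Map-count bookkeeping for the functions below: the low 7 bits of m_MapCount count
// explicit map references, while MAP_COUNT_FLAG_PERSISTENT_MAP marks an allocation
// created as persistently mapped. Both BlockAllocMap and DedicatedAllocMap refuse to
// exceed 0x7F simultaneous mappings.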
void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        // ...
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        // ...
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            // ...
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            // ... (offset, size, flags)
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            // ...
        }
        // ...
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        // ...
        m_DedicatedAllocation.m_pMappedData = VMA_NULL;
        (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory);
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    // ...
};

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.WriteString("Blocks");
    // ...
    json.WriteString("Allocations");
    // ...
    json.WriteString("UnusedRanges");
    // ...
    json.WriteString("UsedBytes");
    // ...
    json.WriteString("UnusedBytes");
    // ...

    json.WriteString("AllocationSize");
    json.BeginObject(true);
    json.WriteString("Min");
    // ...
    json.WriteString("Avg");
    // ...
    json.WriteString("Max");
    // ...

    json.WriteString("UnusedRangeSize");
    json.BeginObject(true);
    json.WriteString("Min");
    // ...
    json.WriteString("Avg");
    // ...
    json.WriteString("Max");
    // ...
}
#endif // #if VMA_STATS_STRING_ENABLED

struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};

VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    // ...
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata::~VmaBlockMetadata()
{
}

void VmaBlockMetadata::Init(VkDeviceSize size)
{
    // ...
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}
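// Validate() (below) walks the whole suballocation list and cross-checks the cached
// metadata: offsets must be contiguous, two adjacent suballocations must never both
// be free (they would have been merged), free items large enough to register must
// all appear in m_FreeSuballocationsBySize sorted by size, and the recalculated
// offset, free count and free-size totals must match m_Size, m_FreeCount and
// m_SumFreeSize.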
bool VmaBlockMetadata::Validate() const
{
    if(m_Suballocations.empty())
        return false;

    VkDeviceSize calculatedOffset = 0;
    uint32_t calculatedFreeCount = 0;
    VkDeviceSize calculatedSumFreeSize = 0;
    size_t freeSuballocationsToRegister = 0;
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        if(subAlloc.offset != calculatedOffset)
            return false;

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        if(prevFree && currFree)
            return false;
        if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
            return false;

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                ++freeSuballocationsToRegister;
        }
        else
        {
            if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
                return false;
            if(subAlloc.hAllocation->GetSize() != subAlloc.size)
                return false;
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
        return false;

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
        if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
            return false;
        if(suballocItem->size < lastSize)
            return false;
        lastSize = suballocItem->size;
    }

    if(!ValidateFreeSuballocationList() ||
        (calculatedOffset != m_Size) ||
        (calculatedSumFreeSize != m_SumFreeSize) ||
        (calculatedFreeCount != m_FreeCount))
        return false;

    return true;
}
VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    return 0;
}

bool VmaBlockMetadata::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}

void VmaBlockMetadata::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    // ...
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            // ...
        }
        // ...
    }
}

void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    // ...
    inoutStats.size += m_Size;
    // ...
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
{
    json.WriteString("TotalBytes");
    json.WriteNumber(m_Size);

    json.WriteString("UnusedBytes");
    json.WriteNumber(m_SumFreeSize);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(m_FreeCount);

    json.WriteString("Suballocations");
    // ...
    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        json.BeginObject(true);

        json.WriteString("Type");
        json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);

        json.WriteString("Size");
        json.WriteNumber(suballocItem->size);

        json.WriteString("Offset");
        json.WriteNumber(suballocItem->offset);

        if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            const void* pUserData = suballocItem->hAllocation->GetUserData();
            if(pUserData != VMA_NULL)
            {
                json.WriteString("UserData");
                if(suballocItem->hAllocation->IsUserDataString())
                {
                    json.WriteString((const char*)pUserData);
                }
                else
                {
                    // ...
                    json.ContinueString_Pointer(pUserData);
                    // ...
                }
            }
        }
        // ...
    }
    // ...
}

#endif // #if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5324 VMA_ASSERT(IsEmpty());
5325 pAllocationRequest->offset = 0;
5326 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5327 pAllocationRequest->sumItemSize = 0;
5328 pAllocationRequest->item = m_Suballocations.begin();
5329 pAllocationRequest->itemsToMakeLostCount = 0;
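// CreateAllocationRequest (below) looks for a place for a new allocation of the
// given size, alignment and type. When canMakeOtherLost is false it binary-searches
// m_FreeSuballocationsBySize for the best fit among free ranges large enough; when
// true it additionally scans every suballocation, pricing each candidate by how many
// bytes would have to be made lost, and keeps the cheapest request found.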
5332 bool VmaBlockMetadata::CreateAllocationRequest(
5333 uint32_t currentFrameIndex,
5334 uint32_t frameInUseCount,
5335 VkDeviceSize bufferImageGranularity,
5336 VkDeviceSize allocSize,
5337 VkDeviceSize allocAlignment,
5338 VmaSuballocationType allocType,
5339 bool canMakeOtherLost,
5340 VmaAllocationRequest* pAllocationRequest)
5342 VMA_ASSERT(allocSize > 0);
5343 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5344 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5345 VMA_HEAVY_ASSERT(Validate());
5348 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5354 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5355 if(freeSuballocCount > 0)
5360 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5361 m_FreeSuballocationsBySize.data(),
5362 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5364 VmaSuballocationItemSizeLess());
5365 size_t index = it - m_FreeSuballocationsBySize.data();
5366 for(; index < freeSuballocCount; ++index)
5371 bufferImageGranularity,
5375 m_FreeSuballocationsBySize[index],
5377 &pAllocationRequest->offset,
5378 &pAllocationRequest->itemsToMakeLostCount,
5379 &pAllocationRequest->sumFreeSize,
5380 &pAllocationRequest->sumItemSize))
5382 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5390 for(
size_t index = freeSuballocCount; index--; )
5395 bufferImageGranularity,
5399 m_FreeSuballocationsBySize[index],
5401 &pAllocationRequest->offset,
5402 &pAllocationRequest->itemsToMakeLostCount,
5403 &pAllocationRequest->sumFreeSize,
5404 &pAllocationRequest->sumItemSize))
5406 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5413 if(canMakeOtherLost)
5417 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5418 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5420 VmaAllocationRequest tmpAllocRequest = {};
5421 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5422 suballocIt != m_Suballocations.end();
5425 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5426 suballocIt->hAllocation->CanBecomeLost())
5431 bufferImageGranularity,
5437 &tmpAllocRequest.offset,
5438 &tmpAllocRequest.itemsToMakeLostCount,
5439 &tmpAllocRequest.sumFreeSize,
5440 &tmpAllocRequest.sumItemSize))
5442 tmpAllocRequest.item = suballocIt;
5444 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5446 *pAllocationRequest = tmpAllocRequest;
5452 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
5461 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5462 uint32_t currentFrameIndex,
5463 uint32_t frameInUseCount,
5464 VmaAllocationRequest* pAllocationRequest)
5466 while(pAllocationRequest->itemsToMakeLostCount > 0)
5468 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5470 ++pAllocationRequest->item;
5472 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5473 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5474 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5475 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5477 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5478 --pAllocationRequest->itemsToMakeLostCount;
5486 VMA_HEAVY_ASSERT(Validate());
5487 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5488 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
5493 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5495 uint32_t lostAllocationCount = 0;
5496 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5497 it != m_Suballocations.end();
5500 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5501 it->hAllocation->CanBecomeLost() &&
5502 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5504 it = FreeSuballocation(it);
5505 ++lostAllocationCount;
5508 return lostAllocationCount;
5511 void VmaBlockMetadata::Alloc(
5512 const VmaAllocationRequest& request,
5513 VmaSuballocationType type,
5514 VkDeviceSize allocSize,
5515 VmaAllocation hAllocation)
5517 VMA_ASSERT(request.item != m_Suballocations.end());
5518 VmaSuballocation& suballoc = *request.item;
5520 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5522 VMA_ASSERT(request.offset >= suballoc.offset);
5523 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5524 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5525 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
5529 UnregisterFreeSuballocation(request.item);
5531 suballoc.offset = request.offset;
5532 suballoc.size = allocSize;
5533 suballoc.type = type;
5534 suballoc.hAllocation = hAllocation;
5539 VmaSuballocation paddingSuballoc = {};
5540 paddingSuballoc.offset = request.offset + allocSize;
5541 paddingSuballoc.size = paddingEnd;
5542 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5543 VmaSuballocationList::iterator next = request.item;
5545 const VmaSuballocationList::iterator paddingEndItem =
5546 m_Suballocations.insert(next, paddingSuballoc);
5547 RegisterFreeSuballocation(paddingEndItem);
5553 VmaSuballocation paddingSuballoc = {};
5554 paddingSuballoc.offset = request.offset - paddingBegin;
5555 paddingSuballoc.size = paddingBegin;
5556 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5557 const VmaSuballocationList::iterator paddingBeginItem =
5558 m_Suballocations.insert(request.item, paddingSuballoc);
5559 RegisterFreeSuballocation(paddingBeginItem);
5563 m_FreeCount = m_FreeCount - 1;
5564 if(paddingBegin > 0)
5572 m_SumFreeSize -= allocSize;
void VmaBlockMetadata::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

bool VmaBlockMetadata::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
            return false;
        if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            return false;
        if(it->size < lastSize)
            return false;
        lastSize = it->size;
    }
    return true;
}
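// CheckAllocation (below) tests whether an allocation of allocSize, allocAlignment
// and allocType can be placed starting at the given suballocation. It accounts for
// VMA_DEBUG_MARGIN, VMA_DEBUG_ALIGNMENT and bufferImageGranularity conflicts with
// neighbouring suballocations, and returns the resulting offset plus, in the
// canMakeOtherLost path, how many allocations would have to be made lost and the
// free/lost byte sums used to cost the request.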
5636 bool VmaBlockMetadata::CheckAllocation(
5637 uint32_t currentFrameIndex,
5638 uint32_t frameInUseCount,
5639 VkDeviceSize bufferImageGranularity,
5640 VkDeviceSize allocSize,
5641 VkDeviceSize allocAlignment,
5642 VmaSuballocationType allocType,
5643 VmaSuballocationList::const_iterator suballocItem,
5644 bool canMakeOtherLost,
5645 VkDeviceSize* pOffset,
5646 size_t* itemsToMakeLostCount,
5647 VkDeviceSize* pSumFreeSize,
5648 VkDeviceSize* pSumItemSize)
const 5650 VMA_ASSERT(allocSize > 0);
5651 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5652 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5653 VMA_ASSERT(pOffset != VMA_NULL);
5655 *itemsToMakeLostCount = 0;
5659 if(canMakeOtherLost)
5661 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5663 *pSumFreeSize = suballocItem->size;
5667 if(suballocItem->hAllocation->CanBecomeLost() &&
5668 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5670 ++*itemsToMakeLostCount;
5671 *pSumItemSize = suballocItem->size;
5680 if(m_Size - suballocItem->offset < allocSize)
5686 *pOffset = suballocItem->offset;
5689 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5691 *pOffset += VMA_DEBUG_MARGIN;
5695 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5696 *pOffset = VmaAlignUp(*pOffset, alignment);
5700 if(bufferImageGranularity > 1)
5702 bool bufferImageGranularityConflict =
false;
5703 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5704 while(prevSuballocItem != m_Suballocations.cbegin())
5707 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5708 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5710 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5712 bufferImageGranularityConflict =
true;
5720 if(bufferImageGranularityConflict)
5722 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5728 if(*pOffset >= suballocItem->offset + suballocItem->size)
5734 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5737 VmaSuballocationList::const_iterator next = suballocItem;
5739 const VkDeviceSize requiredEndMargin =
5740 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5742 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5744 if(suballocItem->offset + totalSize > m_Size)
5751 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5752 if(totalSize > suballocItem->size)
5754 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5755 while(remainingSize > 0)
5758 if(lastSuballocItem == m_Suballocations.cend())
5762 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5764 *pSumFreeSize += lastSuballocItem->size;
5768 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5769 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5770 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5772 ++*itemsToMakeLostCount;
5773 *pSumItemSize += lastSuballocItem->size;
5780 remainingSize = (lastSuballocItem->size < remainingSize) ?
5781 remainingSize - lastSuballocItem->size : 0;
5787 if(bufferImageGranularity > 1)
5789 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5791 while(nextSuballocItem != m_Suballocations.cend())
5793 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5794 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5796 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5798 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5799 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5800 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5802 ++*itemsToMakeLostCount;
5821 const VmaSuballocation& suballoc = *suballocItem;
5822 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5824 *pSumFreeSize = suballoc.size;
5827 if(suballoc.size < allocSize)
5833 *pOffset = suballoc.offset;
5836 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5838 *pOffset += VMA_DEBUG_MARGIN;
5842 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5843 *pOffset = VmaAlignUp(*pOffset, alignment);
5847 if(bufferImageGranularity > 1)
5849 bool bufferImageGranularityConflict =
false;
5850 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5851 while(prevSuballocItem != m_Suballocations.cbegin())
5854 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5855 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5857 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5859 bufferImageGranularityConflict =
true;
5867 if(bufferImageGranularityConflict)
5869 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5874 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5877 VmaSuballocationList::const_iterator next = suballocItem;
5879 const VkDeviceSize requiredEndMargin =
5880 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5883 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
5890 if(bufferImageGranularity > 1)
5892 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5894 while(nextSuballocItem != m_Suballocations.cend())
5896 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5897 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5899 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5918 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5920 VMA_ASSERT(item != m_Suballocations.end());
5921 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5923 VmaSuballocationList::iterator nextItem = item;
5925 VMA_ASSERT(nextItem != m_Suballocations.end());
5926 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5928 item->size += nextItem->size;
5930 m_Suballocations.erase(nextItem);
VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;
    // ...
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);
    // ...
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
    // ...
}

void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);
    // ...
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            // ...
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
    // ...
}
VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}

VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
{
    VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
}

VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count, void **ppData)
{
    // ...
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
            *ppData = m_pMappedData;
        // ...
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            // ... (hMemory, offset, size, flags, &m_pMappedData)
            );
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
                *ppData = m_pMappedData;
            // ...
        }
        // ...
    }
}

void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
{
    // ...
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_MemoryTypeIndex(UINT32_MAX),
    m_hMemory(VK_NULL_HANDLE),
    m_Metadata(hAllocator)
{
}

void VmaDeviceMemoryBlock::Init(
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_hMemory = newMemory;

    m_Metadata.Init(newSize);
}

void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;
}

bool VmaDeviceMemoryBlock::Validate() const
{
    if((m_hMemory == VK_NULL_HANDLE) ||
        (m_Metadata.GetSize() == 0))
    {
        return false;
    }

    return m_Metadata.Validate();
}

VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
}

void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    m_Mapping.Unmap(hAllocator, m_hMemory, count);
}

static void InitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
}
6195 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
6203 VmaPool_T::VmaPool_T(
6204 VmaAllocator hAllocator,
6208 createInfo.memoryTypeIndex,
6209 createInfo.blockSize,
6210 createInfo.minBlockCount,
6211 createInfo.maxBlockCount,
6213 createInfo.frameInUseCount,
6218 VmaPool_T::~VmaPool_T()
#if VMA_STATS_STRING_ENABLED
// ...
#endif // #if VMA_STATS_STRING_ENABLED

VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool isCustomPool) :
    m_hAllocator(hAllocator),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_IsCustomPool(isCustomPool),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_HasEmptyBlock(false),
    m_pDefragmentator(VMA_NULL)
{
}

VmaBlockVector::~VmaBlockVector()
{
    VMA_ASSERT(m_pDefragmentator == VMA_NULL);

    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}

void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    // ...
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_Metadata.AddPoolStats(*pStats);
    }
}

static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
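// VmaBlockVector::Allocate (below) tries, in order: (1) an existing block that can
// satisfy the request without making anything lost, (2) creating a new block -- for
// the default (non-custom-pool) vectors the preferred block size may be halved up to
// NEW_BLOCK_SIZE_SHIFT_MAX times when existing blocks are small or allocation fails,
// (3) when canMakeOtherLost is set, up to VMA_ALLOCATION_TRY_COUNT passes that pick
// the block whose allocation request has the lowest CalcCost() and make the required
// allocations lost. If everything fails it returns VK_ERROR_OUT_OF_DEVICE_MEMORY.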
6294 VkResult VmaBlockVector::Allocate(
6295 VmaPool hCurrentPool,
6296 uint32_t currentFrameIndex,
6297 const VkMemoryRequirements& vkMemReq,
6299 VmaSuballocationType suballocType,
6300 VmaAllocation* pAllocation)
6305 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6309 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6311 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6312 VMA_ASSERT(pCurrBlock);
6313 VmaAllocationRequest currRequest = {};
6314 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6317 m_BufferImageGranularity,
6325 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
6329 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6330 if(res != VK_SUCCESS)
6337 if(pCurrBlock->m_Metadata.IsEmpty())
6339 m_HasEmptyBlock =
false;
6342 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6343 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6344 (*pAllocation)->InitBlockAllocation(
6353 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6354 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6355 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6360 const bool canCreateNewBlock =
6362 (m_Blocks.size() < m_MaxBlockCount);
6365 if(canCreateNewBlock)
6368 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6369 uint32_t newBlockSizeShift = 0;
6370 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
6374 if(m_IsCustomPool ==
false)
6377 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6378 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6380 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6381 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6383 newBlockSize = smallerNewBlockSize;
6384 ++newBlockSizeShift;
6393 size_t newBlockIndex = 0;
6394 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
6396 if(m_IsCustomPool ==
false)
6398 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6400 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6401 if(smallerNewBlockSize >= vkMemReq.size)
6403 newBlockSize = smallerNewBlockSize;
6404 ++newBlockSizeShift;
6405 res = CreateBlock(newBlockSize, &newBlockIndex);
6414 if(res == VK_SUCCESS)
6416 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6417 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6421 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6422 if(res != VK_SUCCESS)
6429 VmaAllocationRequest allocRequest;
6430 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6431 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6432 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6433 (*pAllocation)->InitBlockAllocation(
6436 allocRequest.offset,
6442 VMA_HEAVY_ASSERT(pBlock->Validate());
6443 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6444 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6452 if(canMakeOtherLost)
6454 uint32_t tryIndex = 0;
6455 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6457 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6458 VmaAllocationRequest bestRequest = {};
6459 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
6463 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6465 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6466 VMA_ASSERT(pCurrBlock);
6467 VmaAllocationRequest currRequest = {};
6468 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6471 m_BufferImageGranularity,
6478 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6479 if(pBestRequestBlock == VMA_NULL ||
6480 currRequestCost < bestRequestCost)
6482 pBestRequestBlock = pCurrBlock;
6483 bestRequest = currRequest;
6484 bestRequestCost = currRequestCost;
6486 if(bestRequestCost == 0)
6494 if(pBestRequestBlock != VMA_NULL)
6498 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6499 if(res != VK_SUCCESS)
6505 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6511 if(pBestRequestBlock->m_Metadata.IsEmpty())
6513 m_HasEmptyBlock =
false;
6516 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6517 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6518 (*pAllocation)->InitBlockAllocation(
6527 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6528 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6529 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6543 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6545 return VK_ERROR_TOO_MANY_OBJECTS;
6549 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
void VmaBlockVector::Free(VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    // Scope for lock.
    {
        VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(hAllocation->IsPersistentMap())
        {
            pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
        }

        pBlock->m_Metadata.Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", memTypeIndex);

        if(pBlock->m_Metadata.IsEmpty())
        {
            if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            else
            {
                m_HasEmptyBlock = true;
            }
        }
        else if(m_HasEmptyBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
                m_HasEmptyBlock = false;
            }
        }

        IncrementallySortBlocks();
    }

    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG(" Deleted empty allocation");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}

size_t VmaBlockVector::CalcMaxBlockSize() const
{
    size_t result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}
void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}

void VmaBlockVector::IncrementallySortBlocks()
{
    // Bubble the blocks one step towards the order "most free space last".
    for(size_t i = 1; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
        {
            VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
        }
    }
}

VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;
    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }
    // ...
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }
    return VK_SUCCESS;
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    // ...
    json.WriteString("MemoryTypeIndex");
    json.WriteNumber(m_MemoryTypeIndex);

    json.WriteString("BlockSize");
    json.WriteNumber(m_PreferredBlockSize);

    json.WriteString("BlockCount");
    json.BeginObject(true);
    if(m_MinBlockCount > 0)
    {
        json.WriteString("Min");
        json.WriteNumber((uint64_t)m_MinBlockCount);
    }
    if(m_MaxBlockCount < SIZE_MAX)
    {
        json.WriteString("Max");
        json.WriteNumber((uint64_t)m_MaxBlockCount);
    }
    json.WriteString("Cur");
    json.WriteNumber((uint64_t)m_Blocks.size());
    // ...

    if(m_FrameInUseCount > 0)
    {
        json.WriteString("FrameInUseCount");
        json.WriteNumber(m_FrameInUseCount);
    }
    // ...

    json.WriteString("PreferredBlockSize");
    json.WriteNumber(m_PreferredBlockSize);
    // ...

    json.WriteString("Blocks");
    // ...
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
    }
    // ...
}

#endif // #if VMA_STATS_STRING_ENABLED

VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
    VmaAllocator hAllocator,
    uint32_t currentFrameIndex)
{
    if(m_pDefragmentator == VMA_NULL)
    {
        m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
            // ...
            );
    }

    return m_pDefragmentator;
}
VkResult VmaBlockVector::Defragment(
    VmaDefragmentationStats* pDefragmentationStats,
    VkDeviceSize& maxBytesToMove,
    uint32_t& maxAllocationsToMove)
{
    if(m_pDefragmentator == VMA_NULL)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    // ...
    VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);

    // ...
    if(pDefragmentationStats != VMA_NULL)
    {
        const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
        const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
        // ...
        VMA_ASSERT(bytesMoved <= maxBytesToMove);
        VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
        // ...
    }

    // Free empty blocks.
    m_HasEmptyBlock = false;
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_Metadata.IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    // ...
                    pDefragmentationStats->bytesFreed += pBlock->m_Metadata.GetSize();
                }
                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                m_HasEmptyBlock = true;
            }
        }
    }

    return result;
}

void VmaBlockVector::DestroyDefragmentator()
{
    if(m_pDefragmentator != VMA_NULL)
    {
        vma_delete(m_hAllocator, m_pDefragmentator);
        m_pDefragmentator = VMA_NULL;
    }
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}

void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
6863 VmaDefragmentator::VmaDefragmentator(
6864 VmaAllocator hAllocator,
6865 VmaBlockVector* pBlockVector,
6866 uint32_t currentFrameIndex) :
6867 m_hAllocator(hAllocator),
6868 m_pBlockVector(pBlockVector),
6869 m_CurrentFrameIndex(currentFrameIndex),
6871 m_AllocationsMoved(0),
6872 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6873 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
VmaDefragmentator::~VmaDefragmentator()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocationInfo allocInfo;
    allocInfo.m_hAllocation = hAlloc;
    allocInfo.m_pChanged = pChanged;
    m_Allocations.push_back(allocInfo);
}

VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
{
    // Already mapped for defragmentation, or already mapped by someone else.
    if(m_pMappedDataForDefragmentation)
    {
        *ppMappedData = m_pMappedDataForDefragmentation;
        return VK_SUCCESS;
    }
    if(m_pBlock->m_Mapping.GetMappedData())
    {
        *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
        return VK_SUCCESS;
    }
    // Map on our own.
    VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
    *ppMappedData = m_pMappedDataForDefragmentation;
    return res;
}

void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
{
    if(m_pMappedDataForDefragmentation != VMA_NULL)
    {
        m_pBlock->Unmap(hAllocator, 1);
        // ...
    }
}
6923 VkResult VmaDefragmentator::DefragmentRound(
6924 VkDeviceSize maxBytesToMove,
6925 uint32_t maxAllocationsToMove)
6927 if(m_Blocks.empty())
6932 size_t srcBlockIndex = m_Blocks.size() - 1;
6933 size_t srcAllocIndex = SIZE_MAX;
6939 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6941 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6944 if(srcBlockIndex == 0)
6951 srcAllocIndex = SIZE_MAX;
6956 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6960 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6961 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6963 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6964 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6965 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6966 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
6969 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6971 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6972 VmaAllocationRequest dstAllocRequest;
6973 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6974 m_CurrentFrameIndex,
6975 m_pBlockVector->GetFrameInUseCount(),
6976 m_pBlockVector->GetBufferImageGranularity(),
6981 &dstAllocRequest) &&
6983 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6985 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
6988 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6989 (m_BytesMoved + size > maxBytesToMove))
6991 return VK_INCOMPLETE;
6994 void* pDstMappedData = VMA_NULL;
6995 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6996 if(res != VK_SUCCESS)
7001 void* pSrcMappedData = VMA_NULL;
7002 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7003 if(res != VK_SUCCESS)
7010 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
7011 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
7012 static_cast<size_t>(size));
7014 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
7015 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
7017 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
7019 if(allocInfo.m_pChanged != VMA_NULL)
7021 *allocInfo.m_pChanged = VK_TRUE;
7024 ++m_AllocationsMoved;
7025 m_BytesMoved += size;
7027 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
7035 if(srcAllocIndex > 0)
7041 if(srcBlockIndex > 0)
7044 srcAllocIndex = SIZE_MAX;
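// VmaDefragmentator::Defragment (below) first builds one BlockInfo per block in the
// vector, distributes the registered (non-lost) allocations into their owning
// blocks, sorts the allocations inside each block by size descending, then sorts the
// blocks so that preferred move destinations come first, and finally runs
// DefragmentRound up to two times within the byte/allocation budget before unmapping
// every block it had to map.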
7054 VkResult VmaDefragmentator::Defragment(
7055 VkDeviceSize maxBytesToMove,
7056 uint32_t maxAllocationsToMove)
7058 if(m_Allocations.empty())
7064 const size_t blockCount = m_pBlockVector->m_Blocks.size();
7065 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7067 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7068 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7069 m_Blocks.push_back(pBlockInfo);
7073 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
7076 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7078 AllocationInfo& allocInfo = m_Allocations[blockIndex];
7080 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7082 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7083 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7084 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7086 (*it)->m_Allocations.push_back(allocInfo);
7094 m_Allocations.clear();
7096 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7098 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7099 pBlockInfo->CalcHasNonMovableAllocations();
7100 pBlockInfo->SortAllocationsBySizeDescecnding();
7104 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
7107 VkResult result = VK_SUCCESS;
7108 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7110 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
7114 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7116 m_Blocks[blockIndex]->Unmap(m_hAllocator);
7122 bool VmaDefragmentator::MoveMakesSense(
7123 size_t dstBlockIndex, VkDeviceSize dstOffset,
7124 size_t srcBlockIndex, VkDeviceSize srcOffset)
7126 if(dstBlockIndex < srcBlockIndex)
7130 if(dstBlockIndex > srcBlockIndex)
7134 if(dstOffset < srcOffset)
7147 m_hDevice(pCreateInfo->device),
7148 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
7149 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7150 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7151 m_PreferredLargeHeapBlockSize(0),
7152 m_PhysicalDevice(pCreateInfo->physicalDevice),
7153 m_CurrentFrameIndex(0),
7154 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
7158 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7159 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7160 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7162 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7163 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
7165 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7167 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
7178 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7179 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
7186 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7188 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7189 if(limit != VK_WHOLE_SIZE)
7191 m_HeapSizeLimit[heapIndex] = limit;
7192 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7194 m_MemProps.memoryHeaps[heapIndex].size = limit;
7200 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7202 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7204 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7210 GetBufferImageGranularity(),
7215 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
VmaAllocator_T::~VmaAllocator_T()
{
    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}

void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = &vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
    m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = &vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
7261 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7262 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7263 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7264 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7265 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7266 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7267 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7268 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7269 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7270 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7271 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7272 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7273 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7274 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7275 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7276 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#undef VMA_COPY_IF_NOT_NULL

    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7284 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7285 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7286 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7287 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7288 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7289 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7290 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7291 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7292 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7293 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7294 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7295 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7296 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7297 if(m_UseKhrDedicatedAllocation)
7299 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7300 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
7304 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7306 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7307 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7308 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7309 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
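// Example of the heuristic above (illustrative numbers, not taken from this file):
// a heap whose size is at or below VMA_SMALL_HEAP_MAX_SIZE -- say a hypothetical
// 256 MiB heap -- gets a preferred block size of heapSize / 8 = 32 MiB, while larger
// heaps simply use m_PreferredLargeHeapBlockSize.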
7312 VkResult VmaAllocator_T::AllocateMemoryOfType(
7313 const VkMemoryRequirements& vkMemReq,
7314 bool dedicatedAllocation,
7315 VkBuffer dedicatedBuffer,
7316 VkImage dedicatedImage,
7318 uint32_t memTypeIndex,
7319 VmaSuballocationType suballocType,
7320 VmaAllocation* pAllocation)
7322 VMA_ASSERT(pAllocation != VMA_NULL);
7323 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
7329 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7334 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7335 VMA_ASSERT(blockVector);
7337 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7338 bool preferDedicatedMemory =
7339 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7340 dedicatedAllocation ||
7342 vkMemReq.size > preferredBlockSize / 2;
7344 if(preferDedicatedMemory &&
7346 finalCreateInfo.
pool == VK_NULL_HANDLE)
7355 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7359 return AllocateDedicatedMemory(
7373 VkResult res = blockVector->Allocate(
7375 m_CurrentFrameIndex.load(),
7380 if(res == VK_SUCCESS)
7388 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7392 res = AllocateDedicatedMemory(
7398 finalCreateInfo.pUserData,
7402 if(res == VK_SUCCESS)
7405 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7411 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
7418 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7420 VmaSuballocationType suballocType,
7421 uint32_t memTypeIndex,
7423 bool isUserDataString,
7425 VkBuffer dedicatedBuffer,
7426 VkImage dedicatedImage,
7427 VmaAllocation* pAllocation)
7429 VMA_ASSERT(pAllocation);
7431 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7432 allocInfo.memoryTypeIndex = memTypeIndex;
7433 allocInfo.allocationSize = size;
7435 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7436 if(m_UseKhrDedicatedAllocation)
7438 if(dedicatedBuffer != VK_NULL_HANDLE)
7440 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7441 dedicatedAllocInfo.buffer = dedicatedBuffer;
7442 allocInfo.pNext = &dedicatedAllocInfo;
7444 else if(dedicatedImage != VK_NULL_HANDLE)
7446 dedicatedAllocInfo.image = dedicatedImage;
7447 allocInfo.pNext = &dedicatedAllocInfo;
7452 VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    // ...
    res = (*m_VulkanFunctions.vkMapMemory)(
        // ...
        );
    if(res < 0)
    {
        VMA_DEBUG_LOG(" vkMapMemory FAILED");
        FreeVulkanMemory(memTypeIndex, size, hMemory);
        return res;
    }

    *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);

    {
        VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
    }

    VMA_DEBUG_LOG(" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
    if(m_UseKhrDedicatedAllocation)
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
    if(m_UseKhrDedicatedAllocation)
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
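
/*
Usage sketch (not part of the library): the VkMemoryDedicatedRequirementsKHR query path above is
only taken when the allocator was created with the dedicated-allocation flag, assuming the
VK_KHR_get_memory_requirements2 and VK_KHR_dedicated_allocation device extensions were enabled.
`physicalDevice` and `device` are assumed to be valid handles created elsewhere.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.flags = VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;

    VmaAllocator allocator;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
*/
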
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        return createInfo.pool->m_BlockVector.Allocate(
            createInfo.pool,
            m_CurrentFrameIndex.load(),
            vkMemReq, createInfo, suballocType, pAllocation);
    }
    else
    {
        // Bit mask of memory Vulkan types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            res = AllocateMemoryOfType(
                vkMemReq,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer, dedicatedImage,
                createInfo, memTypeIndex, suballocType, pAllocation);
            // Succeeded or failed with a reason other than lack of memory.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Remove the failed memory type from the list of possibilities and find an alternative.
            memoryTypeBits &= ~(1u << memTypeIndex);
            res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
            if(res == VK_SUCCESS)
            {
                res = AllocateMemoryOfType(
                    vkMemReq,
                    requiresDedicatedAllocation || prefersDedicatedAllocation,
                    dedicatedBuffer, dedicatedImage,
                    createInfo, memTypeIndex, suballocType, pAllocation);
                if(res == VK_SUCCESS)
                {
                    return res;
                }
            }
            // No other matching memory type could be found, or allocation from it also failed.
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        else
        {
            return res;
        }
    }
}
void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation);

    if(allocation->CanBecomeLost() == false ||
        allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        switch(allocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            {
                VmaBlockVector* pBlockVector = VMA_NULL;
                VmaPool hPool = allocation->GetPool();
                if(hPool != VK_NULL_HANDLE)
                {
                    pBlockVector = &hPool->m_BlockVector;
                }
                else
                {
                    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                    pBlockVector = m_pBlockVectors[memTypeIndex];
                }
                pBlockVector->Free(allocation);
            }
            break;
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            FreeDedicatedMemory(allocation);
            break;
        default:
            VMA_ASSERT(0);
        }
    }

    allocation->SetUserData(this, VMA_NULL);
    vma_delete(this, allocation);
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
static const uint32_t VMA_VENDOR_ID_AMD = 4098;

VkResult VmaAllocator_T::Defragment(
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    if(pAllocationsChanged != VMA_NULL)
    {
        memset(pAllocationsChanged, 0, sizeof(*pAllocationsChanged));
    }
    if(pDefragmentationStats != VMA_NULL)
    {
        memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
    }

    const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();

    VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);

    const size_t poolCount = m_Pools.size();

    // Dispatch pAllocations among defragmentators. Create them in BlockVectors when necessary.
    for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
        // Only block allocations in HOST_VISIBLE memory that are not lost can be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVector* pAllocBlockVector = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                pAllocBlockVector = &hAllocPool->GetBlockVector();
            }
            // This allocation belongs to a general pool.
            else
            {
                pAllocBlockVector = m_pBlockVectors[memTypeIndex];
            }

            VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);

            VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                &pAllocationsChanged[allocIndex] : VMA_NULL;
            pDefragmentator->AddAllocation(hAlloc, pChanged);
        }
    }

    VkResult result = VK_SUCCESS;

    // Main processing.
    VkDeviceSize maxBytesToMove = SIZE_MAX;
    uint32_t maxAllocationsToMove = UINT32_MAX;
    if(pDefragmentationInfo != VMA_NULL)
    {
        maxBytesToMove = pDefragmentationInfo->maxBytesToMove;
        maxAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
    }

    // Process standard memory.
    for(uint32_t memTypeIndex = 0;
        (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
        ++memTypeIndex)
    {
        // Only HOST_VISIBLE memory types can be defragmented.
        if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
        {
            result = m_pBlockVectors[memTypeIndex]->Defragment(
                pDefragmentationStats, maxBytesToMove, maxAllocationsToMove);
        }
    }

    // Process custom pools.
    for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
    {
        result = m_Pools[poolIndex]->GetBlockVector().Defragment(
            pDefragmentationStats, maxBytesToMove, maxAllocationsToMove);
    }

    // Destroy defragmentators.
    for(size_t poolIndex = poolCount; poolIndex--; )
    {
        m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
    }
    for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
    {
        if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
        {
            m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
        }
    }

    return result;
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                break;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // Stripped-down version of GetAllocationInfo that only updates the last-use frame index.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
                return false;
            else if(localLastUseFrameIndex == localCurrFrameIndex)
                return true;
            else if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                localLastUseFrameIndex = localCurrFrameIndex;
        }
    }
    return true;
}
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);
    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}
void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}

void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
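
/*
Usage sketch (not part of the library): how the frame index and lost allocations interact.
`allocator` is assumed to be a valid VmaAllocator, `frameIndex` a counter the application
increments once per frame, and `alloc` an allocation created elsewhere with
VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT.

    vmaSetCurrentFrameIndex(allocator, frameIndex);

    // A placeholder allocation that is permanently in the lost state.
    VmaAllocation lostAlloc;
    vmaCreateLostAllocation(allocator, &lostAlloc);

    // Check whether alloc is still usable and mark it as used in the current frame.
    if(vmaTouchAllocation(allocator, alloc) == VK_FALSE)
    {
        // The allocation was lost - the resource has to be recreated.
    }
*/
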
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    VkResult res;
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
        {
            res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
            if(res == VK_SUCCESS)
            {
                m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
            }
        }
        else
        {
            res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    else
    {
        res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    }

    if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    }

    return res;
}
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        m_HeapSizeLimit[heapIndex] += size;
    }
}
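
/*
Usage sketch (not part of the library): the m_HeapSizeLimit bookkeeping above is driven by
VmaAllocatorCreateInfo::pHeapSizeLimit. The 256 MiB cap on heap 0 below is an arbitrary example
value; heaps left at VK_WHOLE_SIZE stay unlimited. `physicalDevice` and `device` are assumed
to be valid handles.

    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        heapSizeLimit[i] = VK_WHOLE_SIZE;
    heapSizeLimit[0] = 256ull * 1024 * 1024;

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.pHeapSizeLimit = heapSizeLimit;

    VmaAllocator allocator;
    vmaCreateAllocator(&allocatorInfo, &allocator);
*/
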
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char *pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}
void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    if(allocation->GetMappedData() != VMA_NULL)
    {
        (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
    }

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                json.BeginObject(true);

                json.WriteString("Type");
                json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);

                json.WriteString("Size");
                json.WriteNumber(hAlloc->GetSize());

                const void* pUserData = hAlloc->GetUserData();
                if(pUserData != VMA_NULL)
                {
                    json.WriteString("UserData");
                    if(hAlloc->IsUserDataString())
                    {
                        json.WriteString((const char*)pUserData);
                    }
                    else
                    {
                        json.BeginString();
                        json.ContinueString_Pointer(pUserData);
                        json.EndString();
                    }
                }

                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginArray();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndArray();
        }
    }
}
#endif // #if VMA_STATS_STRING_ENABLED

static VkResult AllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    return allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pAllocationCreateInfo,
        suballocType,
        pAllocation);
}
VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return VK_SUCCESS;
}

void vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}
void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}
#if VMA_STATS_STRING_ENABLED

void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}
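
/*
Usage sketch (not part of the library): dumping the JSON statistics built above, assuming
`allocator` is a valid VmaAllocator. The string must be released with vmaFreeStatsString().

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE -> include the detailed map
    printf("%s\n", statsString);
    vmaFreeStatsString(allocator, statsString);
*/
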
#endif // #if VMA_STATS_STRING_ENABLED

VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    default:
        break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags = allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost is the number of preferredFlags bits not present in this memory type.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                        return VK_SUCCESS;
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
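
/*
Usage sketch (not part of the library): picking a memory type for an upload (staging) use case.
UINT32_MAX as memoryTypeBits means "any memory type"; the chosen usage value is an example only.

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
    // res == VK_SUCCESS -> memTypeIndex holds the cheapest matching memory type.
*/
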
VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VkResult vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);
    VMA_DEBUG_LOG("vmaCreatePool");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->CreatePool(pCreateInfo, pPool);
}
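
/*
Usage sketch (not part of the library): creating a custom pool for a memory type found with
vmaFindMemoryTypeIndex(); the block size and counts below are arbitrary example values.

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 64ull * 1024 * 1024; // 64 MiB per VkDeviceMemory block
    poolCreateInfo.minBlockCount = 1;
    poolCreateInfo.maxBlockCount = 4;

    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
    // ... allocate from it via VmaAllocationCreateInfo::pool ...
    vmaDestroyPool(allocator, pool);
*/
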
void vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);
    if(pool == VK_NULL_HANDLE)
    {
        return;
    }
    VMA_DEBUG_LOG("vmaDestroyPool");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->DestroyPool(pool);
}
void vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetPoolStats(pool, pPoolStats);
}

void vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
    VMA_DEBUG_LOG("vmaAllocateMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, VK_NULL_HANDLE, // dedicatedBuffer, dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        pAllocation);

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
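
/*
Usage sketch (not part of the library): allocating raw memory for a resource created outside VMA.
`device` and `buffer` are assumed to be valid handles created elsewhere.

    VkMemoryRequirements memReq;
    vkGetBufferMemoryRequirements(device, buffer, &memReq);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation allocation;
    VmaAllocationInfo allocInfo;
    VkResult res = vmaAllocateMemory(allocator, &memReq, &allocCreateInfo, &allocation, &allocInfo);
    // allocInfo.deviceMemory / allocInfo.offset can then be passed to vkBindBufferMemory.
    vmaFreeMemory(allocator, allocation);
*/
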
VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        pAllocation);

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = AllocateMemoryForImage(
        allocator, image, pCreateInfo, VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN, pAllocation);

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
void vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_LOG("vmaFreeMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->FreeMemory(allocation);
}
void vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VkBool32 vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->TouchAllocation(allocation);
}
void vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocation->SetUserData(allocator, pUserData);
}

void vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK;
    allocator->CreateLostAllocation(pAllocation);
}
VkResult vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->Map(allocation, ppData);
}

void vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->Unmap(allocation);
}
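
/*
Usage sketch (not part of the library): filling a HOST_VISIBLE allocation through vmaMapMemory().
Every vmaMapMemory() must be paired with vmaUnmapMemory(); `srcData` and `srcSize` are assumed
application-side inputs.

    void* mapped = VMA_NULL;
    VkResult res = vmaMapMemory(allocator, allocation, &mapped);
    if(res == VK_SUCCESS)
    {
        memcpy(mapped, srcData, (size_t)srcSize);
        vmaUnmapMemory(allocator, allocation);
    }
*/
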
VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo *pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    VMA_ASSERT(allocator && pAllocations);
    VMA_DEBUG_LOG("vmaDefragment");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
}
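
/*
Usage sketch (not part of the library): compacting a set of HOST_VISIBLE allocations. The vector
of allocations is assumed to be collected by the application; buffers or images bound to moved
allocations have to be recreated and rebound afterwards.

    std::vector<VmaAllocation> allocations; // filled by the application
    std::vector<VkBool32> changed(allocations.size());

    VmaDefragmentationStats stats = {};
    VkResult res = vmaDefragment(
        allocator,
        allocations.data(), allocations.size(),
        changed.data(),
        VMA_NULL, // default VmaDefragmentationInfo: no move limits
        &stats);
    // stats.bytesMoved / stats.allocationsMoved describe what was done.
*/
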
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
    VMA_DEBUG_LOG("vmaCreateBuffer");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in the alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            pAllocation);
        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
                allocator->m_hDevice,
                *pBuffer,
                (*pAllocation)->GetMemory(),
                (*pAllocation)->GetOffset());
            if(res >= 0)
            {
                // All steps succeeded.
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(*pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
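
/*
Usage sketch (not part of the library): the typical one-call path for a persistently mapped
staging buffer. The size and usage flags below are example values.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VkBuffer buffer;
    VmaAllocation allocation;
    VmaAllocationInfo allocInfo;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buffer, &allocation, &allocInfo);
    // allocInfo.pMappedData stays valid for the lifetime of the allocation.
    vmaDestroyBuffer(allocator, buffer, allocation);
*/
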
void vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    if(buffer != VK_NULL_HANDLE)
    {
        VMA_ASSERT(allocator);
        VMA_DEBUG_LOG("vmaDestroyBuffer");
        VMA_DEBUG_GLOBAL_MUTEX_LOCK
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
        allocator->FreeMemory(allocation);
    }
}
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
    VMA_DEBUG_LOG("vmaCreateImage");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
        if(res >= 0)
        {
            // 3. Bind image with memory.
            res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
                allocator->m_hDevice,
                *pImage,
                (*pAllocation)->GetMemory(),
                (*pAllocation)->GetOffset());
            if(res >= 0)
            {
                // All steps succeeded.
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(*pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
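
/*
Usage sketch (not part of the library): creating a device-local sampled image in one call.
The image parameters below are example values.

    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage image;
    VmaAllocation allocation;
    VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &image, &allocation, VMA_NULL);
    vmaDestroyImage(allocator, image, allocation);
*/
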
void vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    if(image != VK_NULL_HANDLE)
    {
        VMA_ASSERT(allocator);
        VMA_DEBUG_LOG("vmaDestroyImage");
        VMA_DEBUG_GLOBAL_MUTEX_LOCK
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
        allocator->FreeMemory(allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION