#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifndef VMA_RECORDING_ENABLED
    #ifdef _WIN32
        #define VMA_RECORDING_ENABLED 1
    #else
        #define VMA_RECORDING_ENABLED 0
    #endif
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
#if VMA_DEDICATED_ALLOCATION
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);

#ifndef VMA_STATS_STRING_ENABLED
#define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED

VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
void vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VkResult vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);

VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
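/*
Illustrative usage sketch (added commentary, not part of the library itself):
vmaCreateBuffer() combines buffer creation, memory allocation and binding in one call.
A typical call for a GPU-only vertex buffer could look like this; the variable names
(allocator) and the buffer size are placeholders chosen for the example:

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf;
    VmaAllocation alloc;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);

The buffer and its memory are released together later with vmaDestroyBuffer(allocator, buf, alloc).
*/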
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
#define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

#if VMA_USE_STL_CONTAINERS
   #define VMA_USE_STL_VECTOR 1
   #define VMA_USE_STL_UNORDERED_MAP 1
   #define VMA_USE_STL_LIST 1
#endif

#if VMA_USE_STL_VECTOR
   #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
   #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
   #include <list>
#endif

#include <algorithm>

#define VMA_NULL   nullptr

#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
    {
        return pointer;
    }
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
   #ifdef _DEBUG
       #define VMA_ASSERT(expr)         assert(expr)
   #else
       #define VMA_ASSERT(expr)
   #endif
#endif

// Assert that is called very often, e.g. inside data structures. Making it non-empty can slow the program down.
#ifndef VMA_HEAVY_ASSERT
   #ifdef _DEBUG
       #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
   #else
       #define VMA_HEAVY_ASSERT(expr)
   #endif
#endif

#ifndef VMA_ALIGN_OF
   #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
   #if defined(_WIN32)
       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
   #else
       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size)))
   #endif
#endif

#ifndef VMA_SYSTEM_FREE
   #if defined(_WIN32)
       #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
   #else
       #define VMA_SYSTEM_FREE(ptr)   free(ptr)
   #endif
#endif

#ifndef VMA_MIN
   #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
   #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
   #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
   #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
   #define VMA_DEBUG_LOG(format, ...)
#endif

// Helpers for building the JSON stats string.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif // #if VMA_STATS_STRING_ENABLED

#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

// Atomic uint32_t, used e.g. for last-use frame indices.
#ifndef VMA_ATOMIC_UINT32
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
   #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
   #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Magic value written into the debug margins; any other value there indicates corruption.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
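/*
Illustrative note (added commentary, not in the original source): VmaCountBitsSet()
implements the classic parallel population count on a 32-bit value. Inside the allocator
it can be used, for example, to count how many memory types a memoryTypeBits mask permits
(memReq here being a VkMemoryRequirements obtained from vkGetBufferMemoryRequirements):

    const uint32_t allowedTypeCount = VmaCountBitsSet(memReq.memoryTypeBits);
*/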
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
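/*
Worked example (added commentary, not in the original source): with align = 8,
VmaAlignUp<uint32_t>(13, 8) == 16 and VmaAlignDown<uint32_t>(13, 8) == 8. The helpers
are usually called with power-of-two alignments coming from Vulkan limits, and
VmaIsPow2() can assert that expectation before masking-based math is applied:

    VMA_ASSERT(VmaIsPow2(align));
    const VkDeviceSize alignedOffset = VmaAlignUp<VkDeviceSize>(offset, align);
*/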
// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}

static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT

// Returns true if the two memory blocks occupy the same "page" of size pageSize.
// Resource A must start at a lower offset than resource B.
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
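/*
Worked example (added commentary, not in the original source): with pageSize = 4096
(e.g. a bufferImageGranularity value), a resource A occupying [0, 4000) ends on page 0
(3999 & ~4095 == 0), while a resource B starting at offset 4096 begins on page 1
(4096 & ~4095 == 4096), so VmaBlocksOnSamePage(0, 4000, 4096, 4096) returns false and
no granularity conflict between the two resources is possible. The bit masking assumes
pageSize is a power of two, which Vulkan requires for bufferImageGranularity.
*/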
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
// Returns true if given suballocation types could conflict and must respect
// VkPhysicalDeviceLimits::bufferImageGranularity when placed next to each other.
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
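/*
Illustrative note (added commentary, not in the original source): the predicate is made
symmetric by the initial VMA_SWAP, so callers do not need to order the two types. For
example, a linear buffer next to an optimally tiled image conflicts:

    VmaIsBufferImageGranularityConflict(VMA_SUBALLOCATION_TYPE_BUFFER,
                                        VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL) == true

while two buffers never do, so only mixed linear/optimal neighbors have to be separated
by bufferImageGranularity.
*/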
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
    return true;
}
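/*
Illustrative note (added commentary, based on the library's documented debug features):
these helpers only do real work when the header is compiled with a non-zero margin, e.g.

    #define VMA_DEBUG_MARGIN 16
    #define VMA_DEBUG_DETECT_CORRUPTION 1
    #define VMA_IMPLEMENTATION
    #include "vk_mem_alloc.h"

With that configuration every allocation is surrounded by VMA_DEBUG_MARGIN bytes filled
with VMA_CORRUPTION_DETECTION_MAGIC_VALUE, and VmaValidateMagicValue() reports an overrun
when corruption checks such as vmaCheckCorruption() are invoked on mappable memory.
*/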
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    {
        if(m_pMutex)
        {
            m_pMutex->Lock();
        }
    }

    ~VmaMutexLock()
    {
        if(m_pMutex)
        {
            m_pMutex->Unlock();
        }
    }

private:
    VMA_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Performs binary search and returns iterator to the first element that is not less
// than (key), according to comparison (cmp) - i.e. the found element or the place
// where an element with value (key) should be inserted.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
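/*
Illustrative usage sketch (added commentary, not in the original source):
VmaBinaryFindFirstNotLess() is the std::lower_bound equivalent used throughout this file
on sorted arrays. For a sorted array of offsets it returns the first element not less
than the key:

    const VkDeviceSize offsets[] = { 0, 256, 1024, 4096 };
    const VkDeviceSize* it = VmaBinaryFindFirstNotLess(
        offsets, offsets + 4, (VkDeviceSize)300, std::less<VkDeviceSize>());
    // it now points to 1024; it == offsets + 4 would mean "not found / insert at end".
*/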
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}

template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
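/*
Illustrative usage sketch (added commentary, not in the original source): vma_new and
vma_delete route every internal CPU-side object through the user-provided
VkAllocationCallbacks when present and fall back to VMA_SYSTEM_ALIGNED_MALLOC /
VMA_SYSTEM_FREE otherwise. With a hypothetical internal type Foo and a callbacks pointer
pCallbacks it would be used like this:

    Foo* foo = vma_new(pCallbacks, Foo)(constructorArg);  // placement-new into VmaAllocate<Foo>()
    // ...
    vma_delete(pCallbacks, foo);                          // calls ~Foo(), then VmaFree()
*/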
// STL-compatible allocator that forwards to VkAllocationCallbacks.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
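/*
Illustrative usage sketch (added commentary, not in the original source): VmaStlAllocator
adapts VkAllocationCallbacks to the STL allocator interface, so the same callbacks drive
both the library's own VmaVector and, when VMA_USE_STL_CONTAINERS is enabled, the standard
containers. For example (pAllocationCallbacks may be null, which falls back to the system
allocator):

    VmaStlAllocator<uint32_t> alloc(pAllocationCallbacks);
    VmaVector<uint32_t, VmaStlAllocator<uint32_t> > indices(alloc);
    indices.push_back(42u);
*/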
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
3492 #else // #if VMA_USE_STL_VECTOR 3497 template<
typename T,
typename AllocatorT>
3501 typedef T value_type;
3503 VmaVector(
const AllocatorT& allocator) :
3504 m_Allocator(allocator),
3511 VmaVector(
size_t count,
const AllocatorT& allocator) :
3512 m_Allocator(allocator),
3513 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3519 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3520 m_Allocator(src.m_Allocator),
3521 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3522 m_Count(src.m_Count),
3523 m_Capacity(src.m_Count)
3527 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3533 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3536 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3540 resize(rhs.m_Count);
3543 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3549 bool empty()
const {
return m_Count == 0; }
3550 size_t size()
const {
return m_Count; }
3551 T* data() {
return m_pArray; }
3552 const T* data()
const {
return m_pArray; }
3554 T& operator[](
size_t index)
3556 VMA_HEAVY_ASSERT(index < m_Count);
3557 return m_pArray[index];
3559 const T& operator[](
size_t index)
const 3561 VMA_HEAVY_ASSERT(index < m_Count);
3562 return m_pArray[index];
3567 VMA_HEAVY_ASSERT(m_Count > 0);
3570 const T& front()
const 3572 VMA_HEAVY_ASSERT(m_Count > 0);
3577 VMA_HEAVY_ASSERT(m_Count > 0);
3578 return m_pArray[m_Count - 1];
3580 const T& back()
const 3582 VMA_HEAVY_ASSERT(m_Count > 0);
3583 return m_pArray[m_Count - 1];
3586 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3588 newCapacity = VMA_MAX(newCapacity, m_Count);
3590 if((newCapacity < m_Capacity) && !freeMemory)
3592 newCapacity = m_Capacity;
3595 if(newCapacity != m_Capacity)
3597 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3600 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3602 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3603 m_Capacity = newCapacity;
3604 m_pArray = newArray;
3608 void resize(
size_t newCount,
bool freeMemory =
false)
3610 size_t newCapacity = m_Capacity;
3611 if(newCount > m_Capacity)
3613 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3617 newCapacity = newCount;
3620 if(newCapacity != m_Capacity)
3622 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3623 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3624 if(elementsToCopy != 0)
3626 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3628 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3629 m_Capacity = newCapacity;
3630 m_pArray = newArray;
3636 void clear(
bool freeMemory =
false)
3638 resize(0, freeMemory);
3641 void insert(
size_t index,
const T& src)
3643 VMA_HEAVY_ASSERT(index <= m_Count);
3644 const size_t oldCount = size();
3645 resize(oldCount + 1);
3646 if(index < oldCount)
3648 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3650 m_pArray[index] = src;
3653 void remove(
size_t index)
3655 VMA_HEAVY_ASSERT(index < m_Count);
3656 const size_t oldCount = size();
3657 if(index < oldCount - 1)
3659 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3661 resize(oldCount - 1);
3664 void push_back(
const T& src)
3666 const size_t newIndex = size();
3667 resize(newIndex + 1);
3668 m_pArray[newIndex] = src;
3673 VMA_HEAVY_ASSERT(m_Count > 0);
3677 void push_front(
const T& src)
3684 VMA_HEAVY_ASSERT(m_Count > 0);
3688 typedef T* iterator;
3690 iterator begin() {
return m_pArray; }
3691 iterator end() {
return m_pArray + m_Count; }
3694 AllocatorT m_Allocator;
3700 template<
typename T,
typename allocatorT>
3701 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3703 vec.insert(index, item);
3706 template<
typename T,
typename allocatorT>
3707 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
#endif // #if VMA_USE_STL_VECTOR

template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaVectorFindSorted(const IterT& beg, const IterT& end, const KeyT& value)
{
    CmpLess comparator;
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, comparator);
    if(it == end ||
        (!comparator(*it, value) && !comparator(value, *it)))
    {
        return it;
    }
    return end;
}
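/*
Illustrative usage sketch (added commentary, not in the original source): together with a
strict-weak-ordering functor these helpers keep a VmaVector permanently sorted, which is
how structures such as the free-suballocation lists are maintained. Assuming a comparator
like VmaSuballocationOffsetLess (declared further below):

    VmaVectorInsertSorted<VmaSuballocationOffsetLess>(suballocVector, newSuballoc);
    bool removed = VmaVectorRemoveSorted<VmaSuballocationOffsetLess>(suballocVector, oldSuballoc);
*/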
3766 template<
typename T>
3767 class VmaPoolAllocator
3769 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3771 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3772 ~VmaPoolAllocator();
3780 uint32_t NextFreeIndex;
3787 uint32_t FirstFreeIndex;
3790 const VkAllocationCallbacks* m_pAllocationCallbacks;
3791 size_t m_ItemsPerBlock;
3792 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3794 ItemBlock& CreateNewBlock();
3797 template<
typename T>
3798 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3799 m_pAllocationCallbacks(pAllocationCallbacks),
3800 m_ItemsPerBlock(itemsPerBlock),
3801 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3803 VMA_ASSERT(itemsPerBlock > 0);
3806 template<
typename T>
3807 VmaPoolAllocator<T>::~VmaPoolAllocator()
3812 template<
typename T>
3813 void VmaPoolAllocator<T>::Clear()
3815 for(
size_t i = m_ItemBlocks.size(); i--; )
3816 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3817 m_ItemBlocks.clear();
3820 template<
typename T>
3821 T* VmaPoolAllocator<T>::Alloc()
3823 for(
size_t i = m_ItemBlocks.size(); i--; )
3825 ItemBlock& block = m_ItemBlocks[i];
3827 if(block.FirstFreeIndex != UINT32_MAX)
3829 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3830 block.FirstFreeIndex = pItem->NextFreeIndex;
3831 return &pItem->Value;
3836 ItemBlock& newBlock = CreateNewBlock();
3837 Item*
const pItem = &newBlock.pItems[0];
3838 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3839 return &pItem->Value;
3842 template<
typename T>
3843 void VmaPoolAllocator<T>::Free(T* ptr)
3846 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3848 ItemBlock& block = m_ItemBlocks[i];
3852 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3855 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3857 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3858 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3859 block.FirstFreeIndex = index;
3863 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3866 template<
typename T>
3867 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3869 ItemBlock newBlock = {
3870 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3872 m_ItemBlocks.push_back(newBlock);
3875 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3876 newBlock.pItems[i].NextFreeIndex = i + 1;
3877 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3878 return m_ItemBlocks.back();
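/*
Illustrative note (added commentary, not in the original source): VmaPoolAllocator hands
out fixed-size items from blocks of m_ItemsPerBlock elements. Each Item doubles as a
free-list node via NextFreeIndex, so Alloc() and Free() only ever scan the (usually short)
list of blocks, never the items themselves. A minimal usage sketch with a hypothetical
item type:

    struct MyNode { int payload; };
    VmaPoolAllocator<MyNode> nodeAllocator(pAllocationCallbacks, 128);
    MyNode* node = nodeAllocator.Alloc();
    node->payload = 7;
    nodeAllocator.Free(node);
*/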
3884 #if VMA_USE_STL_LIST 3886 #define VmaList std::list 3888 #else // #if VMA_USE_STL_LIST 3890 template<
typename T>
3899 template<
typename T>
3902 VMA_CLASS_NO_COPY(VmaRawList)
3904 typedef VmaListItem<T> ItemType;
3906 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3910 size_t GetCount()
const {
return m_Count; }
3911 bool IsEmpty()
const {
return m_Count == 0; }
3913 ItemType* Front() {
return m_pFront; }
3914 const ItemType* Front()
const {
return m_pFront; }
3915 ItemType* Back() {
return m_pBack; }
3916 const ItemType* Back()
const {
return m_pBack; }
3918 ItemType* PushBack();
3919 ItemType* PushFront();
3920 ItemType* PushBack(
const T& value);
3921 ItemType* PushFront(
const T& value);
3926 ItemType* InsertBefore(ItemType* pItem);
3928 ItemType* InsertAfter(ItemType* pItem);
3930 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3931 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3933 void Remove(ItemType* pItem);
3936 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3937 VmaPoolAllocator<ItemType> m_ItemAllocator;
3943 template<
typename T>
3944 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3945 m_pAllocationCallbacks(pAllocationCallbacks),
3946 m_ItemAllocator(pAllocationCallbacks, 128),
3953 template<
typename T>
3954 VmaRawList<T>::~VmaRawList()
3960 template<
typename T>
3961 void VmaRawList<T>::Clear()
3963 if(IsEmpty() ==
false)
3965 ItemType* pItem = m_pBack;
3966 while(pItem != VMA_NULL)
3968 ItemType*
const pPrevItem = pItem->pPrev;
3969 m_ItemAllocator.Free(pItem);
3972 m_pFront = VMA_NULL;
3978 template<
typename T>
3979 VmaListItem<T>* VmaRawList<T>::PushBack()
3981 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3982 pNewItem->pNext = VMA_NULL;
3985 pNewItem->pPrev = VMA_NULL;
3986 m_pFront = pNewItem;
3992 pNewItem->pPrev = m_pBack;
3993 m_pBack->pNext = pNewItem;
4000 template<
typename T>
4001 VmaListItem<T>* VmaRawList<T>::PushFront()
4003 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4004 pNewItem->pPrev = VMA_NULL;
4007 pNewItem->pNext = VMA_NULL;
4008 m_pFront = pNewItem;
4014 pNewItem->pNext = m_pFront;
4015 m_pFront->pPrev = pNewItem;
4016 m_pFront = pNewItem;
4022 template<
typename T>
4023 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4025 ItemType*
const pNewItem = PushBack();
4026 pNewItem->Value = value;
4030 template<
typename T>
4031 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4033 ItemType*
const pNewItem = PushFront();
4034 pNewItem->Value = value;
4038 template<
typename T>
4039 void VmaRawList<T>::PopBack()
4041 VMA_HEAVY_ASSERT(m_Count > 0);
4042 ItemType*
const pBackItem = m_pBack;
4043 ItemType*
const pPrevItem = pBackItem->pPrev;
4044 if(pPrevItem != VMA_NULL)
4046 pPrevItem->pNext = VMA_NULL;
4048 m_pBack = pPrevItem;
4049 m_ItemAllocator.Free(pBackItem);
4053 template<
typename T>
4054 void VmaRawList<T>::PopFront()
4056 VMA_HEAVY_ASSERT(m_Count > 0);
4057 ItemType*
const pFrontItem = m_pFront;
4058 ItemType*
const pNextItem = pFrontItem->pNext;
4059 if(pNextItem != VMA_NULL)
4061 pNextItem->pPrev = VMA_NULL;
4063 m_pFront = pNextItem;
4064 m_ItemAllocator.Free(pFrontItem);
4068 template<
typename T>
4069 void VmaRawList<T>::Remove(ItemType* pItem)
4071 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4072 VMA_HEAVY_ASSERT(m_Count > 0);
4074 if(pItem->pPrev != VMA_NULL)
4076 pItem->pPrev->pNext = pItem->pNext;
4080 VMA_HEAVY_ASSERT(m_pFront == pItem);
4081 m_pFront = pItem->pNext;
4084 if(pItem->pNext != VMA_NULL)
4086 pItem->pNext->pPrev = pItem->pPrev;
4090 VMA_HEAVY_ASSERT(m_pBack == pItem);
4091 m_pBack = pItem->pPrev;
4094 m_ItemAllocator.Free(pItem);
4098 template<
typename T>
4099 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4101 if(pItem != VMA_NULL)
4103 ItemType*
const prevItem = pItem->pPrev;
4104 ItemType*
const newItem = m_ItemAllocator.Alloc();
4105 newItem->pPrev = prevItem;
4106 newItem->pNext = pItem;
4107 pItem->pPrev = newItem;
4108 if(prevItem != VMA_NULL)
4110 prevItem->pNext = newItem;
4114 VMA_HEAVY_ASSERT(m_pFront == pItem);
4124 template<
typename T>
4125 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4127 if(pItem != VMA_NULL)
4129 ItemType*
const nextItem = pItem->pNext;
4130 ItemType*
const newItem = m_ItemAllocator.Alloc();
4131 newItem->pNext = nextItem;
4132 newItem->pPrev = pItem;
4133 pItem->pNext = newItem;
4134 if(nextItem != VMA_NULL)
4136 nextItem->pPrev = newItem;
4140 VMA_HEAVY_ASSERT(m_pBack == pItem);
4150 template<
typename T>
4151 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4153 ItemType*
const newItem = InsertBefore(pItem);
4154 newItem->Value = value;
4158 template<
typename T>
4159 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4161 ItemType*
const newItem = InsertAfter(pItem);
4162 newItem->Value = value;
4166 template<
typename T,
typename AllocatorT>
4169 VMA_CLASS_NO_COPY(VmaList)
4180 T& operator*()
const 4182 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4183 return m_pItem->Value;
4185 T* operator->()
const 4187 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4188 return &m_pItem->Value;
4191 iterator& operator++()
4193 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4194 m_pItem = m_pItem->pNext;
4197 iterator& operator--()
4199 if(m_pItem != VMA_NULL)
4201 m_pItem = m_pItem->pPrev;
4205 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4206 m_pItem = m_pList->Back();
4211 iterator operator++(
int)
4213 iterator result = *
this;
4217 iterator operator--(
int)
4219 iterator result = *
this;
4224 bool operator==(
const iterator& rhs)
const 4226 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4227 return m_pItem == rhs.m_pItem;
4229 bool operator!=(
const iterator& rhs)
const 4231 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4232 return m_pItem != rhs.m_pItem;
4236 VmaRawList<T>* m_pList;
4237 VmaListItem<T>* m_pItem;
4239 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4245 friend class VmaList<T, AllocatorT>;
4248 class const_iterator
4257 const_iterator(
const iterator& src) :
4258 m_pList(src.m_pList),
4259 m_pItem(src.m_pItem)
4263 const T& operator*()
const 4265 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4266 return m_pItem->Value;
4268 const T* operator->()
const 4270 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4271 return &m_pItem->Value;
4274 const_iterator& operator++()
4276 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4277 m_pItem = m_pItem->pNext;
4280 const_iterator& operator--()
4282 if(m_pItem != VMA_NULL)
4284 m_pItem = m_pItem->pPrev;
4288 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4289 m_pItem = m_pList->Back();
4294 const_iterator operator++(
int)
4296 const_iterator result = *
this;
4300 const_iterator operator--(
int)
4302 const_iterator result = *
this;
4307 bool operator==(
const const_iterator& rhs)
const 4309 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4310 return m_pItem == rhs.m_pItem;
4312 bool operator!=(
const const_iterator& rhs)
const 4314 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4315 return m_pItem != rhs.m_pItem;
4319 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4325 const VmaRawList<T>* m_pList;
4326 const VmaListItem<T>* m_pItem;
4328 friend class VmaList<T, AllocatorT>;
4331 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4333 bool empty()
const {
return m_RawList.IsEmpty(); }
4334 size_t size()
const {
return m_RawList.GetCount(); }
4336 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4337 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4339 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4340 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4342 void clear() { m_RawList.Clear(); }
4343 void push_back(
const T& value) { m_RawList.PushBack(value); }
4344 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4345 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4348 VmaRawList<T> m_RawList;
4351 #endif // #if VMA_USE_STL_LIST 4359 #if VMA_USE_STL_UNORDERED_MAP 4361 #define VmaPair std::pair 4363 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4364 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4366 #else // #if VMA_USE_STL_UNORDERED_MAP 4368 template<
typename T1,
typename T2>
4374 VmaPair() : first(), second() { }
4375 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4381 template<
typename KeyT,
typename ValueT>
4385 typedef VmaPair<KeyT, ValueT> PairType;
4386 typedef PairType* iterator;
4388 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4390 iterator begin() {
return m_Vector.begin(); }
4391 iterator end() {
return m_Vector.end(); }
4393 void insert(
const PairType& pair);
4394 iterator find(
const KeyT& key);
4395 void erase(iterator it);
4398 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4401 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4403 template<
typename FirstT,
typename SecondT>
4404 struct VmaPairFirstLess
4406 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4408 return lhs.first < rhs.first;
4410 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4412 return lhs.first < rhsFirst;
4416 template<
typename KeyT,
typename ValueT>
4417 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4419 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4421 m_Vector.data() + m_Vector.size(),
4423 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4424 VmaVectorInsert(m_Vector, indexToInsert, pair);
4427 template<
typename KeyT,
typename ValueT>
4428 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4430 PairType* it = VmaBinaryFindFirstNotLess(
4432 m_Vector.data() + m_Vector.size(),
4434 VmaPairFirstLess<KeyT, ValueT>());
4435 if((it != m_Vector.end()) && (it->first == key))
4441 return m_Vector.end();
4445 template<
typename KeyT,
typename ValueT>
4446 void VmaMap<KeyT, ValueT>::erase(iterator it)
4448 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4451 #endif // #if VMA_USE_STL_UNORDERED_MAP 4457 class VmaDeviceMemoryBlock;
4459 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4461 struct VmaAllocation_T
4463 VMA_CLASS_NO_COPY(VmaAllocation_T)
4465 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4469 FLAG_USER_DATA_STRING = 0x01,
4473 enum ALLOCATION_TYPE
4475 ALLOCATION_TYPE_NONE,
4476 ALLOCATION_TYPE_BLOCK,
4477 ALLOCATION_TYPE_DEDICATED,
4480 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4483 m_pUserData(VMA_NULL),
4484 m_LastUseFrameIndex(currentFrameIndex),
4485 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4486 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4488 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4490 #if VMA_STATS_STRING_ENABLED 4491 m_CreationFrameIndex = currentFrameIndex;
4492 m_BufferImageUsage = 0;
4498 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4501 VMA_ASSERT(m_pUserData == VMA_NULL);
4504 void InitBlockAllocation(
4506 VmaDeviceMemoryBlock* block,
4507 VkDeviceSize offset,
4508 VkDeviceSize alignment,
4510 VmaSuballocationType suballocationType,
4514 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4515 VMA_ASSERT(block != VMA_NULL);
4516 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4517 m_Alignment = alignment;
4519 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4520 m_SuballocationType = (uint8_t)suballocationType;
4521 m_BlockAllocation.m_hPool = hPool;
4522 m_BlockAllocation.m_Block = block;
4523 m_BlockAllocation.m_Offset = offset;
4524 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4529 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4530 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4531 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4532 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4533 m_BlockAllocation.m_Block = VMA_NULL;
4534 m_BlockAllocation.m_Offset = 0;
4535 m_BlockAllocation.m_CanBecomeLost =
true;
4538 void ChangeBlockAllocation(
4540 VmaDeviceMemoryBlock* block,
4541 VkDeviceSize offset);
4543 void ChangeSize(VkDeviceSize newSize);
4546 void InitDedicatedAllocation(
4547 uint32_t memoryTypeIndex,
4548 VkDeviceMemory hMemory,
4549 VmaSuballocationType suballocationType,
4553 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4554 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4555 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4558 m_SuballocationType = (uint8_t)suballocationType;
4559 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4560 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4561 m_DedicatedAllocation.m_hMemory = hMemory;
4562 m_DedicatedAllocation.m_pMappedData = pMappedData;
4565 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4566 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4567 VkDeviceSize GetSize()
const {
return m_Size; }
4568 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4569 void* GetUserData()
const {
return m_pUserData; }
4570 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4571 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4573 VmaDeviceMemoryBlock* GetBlock()
const 4575 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4576 return m_BlockAllocation.m_Block;
4578 VkDeviceSize GetOffset()
const;
4579 VkDeviceMemory GetMemory()
const;
4580 uint32_t GetMemoryTypeIndex()
const;
4581 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4582 void* GetMappedData()
const;
4583 bool CanBecomeLost()
const;
4586 uint32_t GetLastUseFrameIndex()
const 4588 return m_LastUseFrameIndex.load();
4590 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4592 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4602 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4604 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4606 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4617 void BlockAllocMap();
4618 void BlockAllocUnmap();
4619 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4622 #if VMA_STATS_STRING_ENABLED 4623 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4624 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4626 void InitBufferImageUsage(uint32_t bufferImageUsage)
4628 VMA_ASSERT(m_BufferImageUsage == 0);
4629 m_BufferImageUsage = bufferImageUsage;
4632 void PrintParameters(
class VmaJsonWriter& json)
const;
4636 VkDeviceSize m_Alignment;
4637 VkDeviceSize m_Size;
4639 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4641 uint8_t m_SuballocationType;
4648 struct BlockAllocation
4651 VmaDeviceMemoryBlock* m_Block;
4652 VkDeviceSize m_Offset;
4653 bool m_CanBecomeLost;
4657 struct DedicatedAllocation
4659 uint32_t m_MemoryTypeIndex;
4660 VkDeviceMemory m_hMemory;
4661 void* m_pMappedData;
4667 BlockAllocation m_BlockAllocation;
4669 DedicatedAllocation m_DedicatedAllocation;
4672 #if VMA_STATS_STRING_ENABLED 4673 uint32_t m_CreationFrameIndex;
4674 uint32_t m_BufferImageUsage;
4684 struct VmaSuballocation
4686 VkDeviceSize offset;
4689 VmaSuballocationType type;
4693 struct VmaSuballocationOffsetLess
4695 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4697 return lhs.offset < rhs.offset;
4700 struct VmaSuballocationOffsetGreater
4702 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4704 return lhs.offset > rhs.offset;
4708 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4711 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4726 struct VmaAllocationRequest
4728 VkDeviceSize offset;
4729 VkDeviceSize sumFreeSize;
4730 VkDeviceSize sumItemSize;
4731 VmaSuballocationList::iterator item;
4732 size_t itemsToMakeLostCount;
4735 VkDeviceSize CalcCost()
const 4737 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
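/*
Illustrative note (added commentary, not in the original source): CalcCost() lets the
allocator compare candidate placements when existing allocations are allowed to become
lost. For example, a request that would have to make lost 2 allocations totaling 2048
bytes (sumItemSize) costs 2048 + 2 * VMA_LOST_ALLOCATION_COST = 2048 + 2 * 1048576,
so making allocations lost is strongly penalized relative to simply consuming free space.
*/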
4745 class VmaBlockMetadata
4749 virtual ~VmaBlockMetadata() { }
4750 virtual void Init(VkDeviceSize size) { m_Size = size; }
4753 virtual bool Validate()
const = 0;
4754 VkDeviceSize GetSize()
const {
return m_Size; }
4755 virtual size_t GetAllocationCount()
const = 0;
4756 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4757 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4759 virtual bool IsEmpty()
const = 0;
4761 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4763 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4765 #if VMA_STATS_STRING_ENABLED 4766 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
4772 virtual bool CreateAllocationRequest(
4773 uint32_t currentFrameIndex,
4774 uint32_t frameInUseCount,
4775 VkDeviceSize bufferImageGranularity,
4776 VkDeviceSize allocSize,
4777 VkDeviceSize allocAlignment,
4779 VmaSuballocationType allocType,
4780 bool canMakeOtherLost,
4782 VmaAllocationRequest* pAllocationRequest) = 0;
4784 virtual bool MakeRequestedAllocationsLost(
4785 uint32_t currentFrameIndex,
4786 uint32_t frameInUseCount,
4787 VmaAllocationRequest* pAllocationRequest) = 0;
4789 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4791 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4795 const VmaAllocationRequest& request,
4796 VmaSuballocationType type,
4797 VkDeviceSize allocSize,
4803 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4806 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
4809 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
4811 #if VMA_STATS_STRING_ENABLED 4812 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4813 VkDeviceSize unusedBytes,
4814 size_t allocationCount,
4815 size_t unusedRangeCount)
const;
4816 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4817 VkDeviceSize offset,
4819 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4820 VkDeviceSize offset,
4821 VkDeviceSize size)
const;
4822 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4826 VkDeviceSize m_Size;
4827 const VkAllocationCallbacks* m_pAllocationCallbacks;
4830 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 4831 VMA_ASSERT(0 && "Validation failed: " #cond); \ 4835 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4837 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4840 virtual ~VmaBlockMetadata_Generic();
4841 virtual void Init(VkDeviceSize size);
4843 virtual bool Validate()
const;
4844 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4845 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4846 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4847 virtual bool IsEmpty()
const;
4849 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4850 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4852 #if VMA_STATS_STRING_ENABLED 4853 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4856 virtual bool CreateAllocationRequest(
4857 uint32_t currentFrameIndex,
4858 uint32_t frameInUseCount,
4859 VkDeviceSize bufferImageGranularity,
4860 VkDeviceSize allocSize,
4861 VkDeviceSize allocAlignment,
4863 VmaSuballocationType allocType,
4864 bool canMakeOtherLost,
4866 VmaAllocationRequest* pAllocationRequest);
4868 virtual bool MakeRequestedAllocationsLost(
4869 uint32_t currentFrameIndex,
4870 uint32_t frameInUseCount,
4871 VmaAllocationRequest* pAllocationRequest);
4873 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4875 virtual VkResult CheckCorruption(
const void* pBlockData);
4878 const VmaAllocationRequest& request,
4879 VmaSuballocationType type,
4880 VkDeviceSize allocSize,
4885 virtual void FreeAtOffset(VkDeviceSize offset);
4887 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
4890 uint32_t m_FreeCount;
4891 VkDeviceSize m_SumFreeSize;
4892 VmaSuballocationList m_Suballocations;
4895 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4897 bool ValidateFreeSuballocationList()
const;
4901 bool CheckAllocation(
4902 uint32_t currentFrameIndex,
4903 uint32_t frameInUseCount,
4904 VkDeviceSize bufferImageGranularity,
4905 VkDeviceSize allocSize,
4906 VkDeviceSize allocAlignment,
4907 VmaSuballocationType allocType,
4908 VmaSuballocationList::const_iterator suballocItem,
4909 bool canMakeOtherLost,
4910 VkDeviceSize* pOffset,
4911 size_t* itemsToMakeLostCount,
4912 VkDeviceSize* pSumFreeSize,
4913 VkDeviceSize* pSumItemSize)
const;
4915 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4919 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4922 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4925 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5006 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5008 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5011 virtual ~VmaBlockMetadata_Linear();
5012 virtual void Init(VkDeviceSize size);
5014 virtual bool Validate()
const;
5015 virtual size_t GetAllocationCount()
const;
5016 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5017 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5018 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5020 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5021 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5023 #if VMA_STATS_STRING_ENABLED 5024 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5027 virtual bool CreateAllocationRequest(
5028 uint32_t currentFrameIndex,
5029 uint32_t frameInUseCount,
5030 VkDeviceSize bufferImageGranularity,
5031 VkDeviceSize allocSize,
5032 VkDeviceSize allocAlignment,
5034 VmaSuballocationType allocType,
5035 bool canMakeOtherLost,
5037 VmaAllocationRequest* pAllocationRequest);
5039 virtual bool MakeRequestedAllocationsLost(
5040 uint32_t currentFrameIndex,
5041 uint32_t frameInUseCount,
5042 VmaAllocationRequest* pAllocationRequest);
5044 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5046 virtual VkResult CheckCorruption(
const void* pBlockData);
5049 const VmaAllocationRequest& request,
5050 VmaSuballocationType type,
5051 VkDeviceSize allocSize,
5056 virtual void FreeAtOffset(VkDeviceSize offset);
5066 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5068 enum SECOND_VECTOR_MODE
5070 SECOND_VECTOR_EMPTY,
5075 SECOND_VECTOR_RING_BUFFER,
5081 SECOND_VECTOR_DOUBLE_STACK,
5084 VkDeviceSize m_SumFreeSize;
5085 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5086 uint32_t m_1stVectorIndex;
5087 SECOND_VECTOR_MODE m_2ndVectorMode;
5089 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5090 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5091 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5092 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5095 size_t m_1stNullItemsBeginCount;
5097 size_t m_1stNullItemsMiddleCount;
5099 size_t m_2ndNullItemsCount;
5101 bool ShouldCompact1st()
const;
5102 void CleanupAfterFree();
5116 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5118 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5121 virtual ~VmaBlockMetadata_Buddy();
5122 virtual void Init(VkDeviceSize size);
5124 virtual bool Validate()
const;
5125 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5126 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5127 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5128 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5130 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5131 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5133 #if VMA_STATS_STRING_ENABLED 5134 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5137 virtual bool CreateAllocationRequest(
5138 uint32_t currentFrameIndex,
5139 uint32_t frameInUseCount,
5140 VkDeviceSize bufferImageGranularity,
5141 VkDeviceSize allocSize,
5142 VkDeviceSize allocAlignment,
5144 VmaSuballocationType allocType,
5145 bool canMakeOtherLost,
5147 VmaAllocationRequest* pAllocationRequest);
5149 virtual bool MakeRequestedAllocationsLost(
5150 uint32_t currentFrameIndex,
5151 uint32_t frameInUseCount,
5152 VmaAllocationRequest* pAllocationRequest);
5154 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5156 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5159 const VmaAllocationRequest& request,
5160 VmaSuballocationType type,
5161 VkDeviceSize allocSize,
5165 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5166 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5169 static const VkDeviceSize MIN_NODE_SIZE = 32;
5170 static const size_t MAX_LEVELS = 30;
5172 struct ValidationContext
5174 size_t calculatedAllocationCount;
5175 size_t calculatedFreeCount;
5176 VkDeviceSize calculatedSumFreeSize;
5178 ValidationContext() :
5179 calculatedAllocationCount(0),
5180 calculatedFreeCount(0),
5181 calculatedSumFreeSize(0) { }
5186 VkDeviceSize offset;
5216 VkDeviceSize m_UsableSize;
5217 uint32_t m_LevelCount;
5223 } m_FreeList[MAX_LEVELS];
5225 size_t m_AllocationCount;
5229 VkDeviceSize m_SumFreeSize;
5231 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5232 void DeleteNode(Node* node);
5233 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5234 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5235 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5237 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5238 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5242 void AddToFreeListFront(uint32_t level, Node* node);
5246 void RemoveFromFreeList(uint32_t level, Node* node);
5248 #if VMA_STATS_STRING_ENABLED 5249 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
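/*
Illustrative note (added commentary, not in the original source): the buddy metadata
rounds the usable block size down to a power of two and splits it recursively into halves,
so the node size at a given level is LevelToNodeSize(level) = m_UsableSize >> level.
For example, with m_UsableSize = 256 MiB, level 0 is the whole 256 MiB node and level 3
nodes are 32 MiB; an allocation of 5 MiB would be placed in an 8 MiB node, the smallest
level whose node size still fits the request.
*/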
5259 class VmaDeviceMemoryBlock
5261 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5263 VmaBlockMetadata* m_pMetadata;
5267 ~VmaDeviceMemoryBlock()
5269 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5270 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5276 uint32_t newMemoryTypeIndex,
5277 VkDeviceMemory newMemory,
5278 VkDeviceSize newSize,
5280 uint32_t algorithm);
5284 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5285 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5286 uint32_t GetId()
const {
return m_Id; }
5287 void* GetMappedData()
const {
return m_pMappedData; }
5290 bool Validate()
const;
5295 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5298 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5299 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5301 VkResult BindBufferMemory(
5305 VkResult BindImageMemory(
5311 uint32_t m_MemoryTypeIndex;
5313 VkDeviceMemory m_hMemory;
5318 uint32_t m_MapCount;
5319 void* m_pMappedData;
5322 struct VmaPointerLess
5324 bool operator()(
const void* lhs,
const void* rhs)
const 5330 class VmaDefragmentator;
5338 struct VmaBlockVector
5340 VMA_CLASS_NO_COPY(VmaBlockVector)
5344 uint32_t memoryTypeIndex,
5345 VkDeviceSize preferredBlockSize,
5346 size_t minBlockCount,
5347 size_t maxBlockCount,
5348 VkDeviceSize bufferImageGranularity,
5349 uint32_t frameInUseCount,
5351 bool explicitBlockSize,
5352 uint32_t algorithm);
5355 VkResult CreateMinBlocks();
5357 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5358 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5359 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5360 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5361 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5365 bool IsEmpty()
const {
return m_Blocks.empty(); }
5366 bool IsCorruptionDetectionEnabled()
const;
5370 uint32_t currentFrameIndex,
5372 VkDeviceSize alignment,
5374 VmaSuballocationType suballocType,
5383 #if VMA_STATS_STRING_ENABLED 5384 void PrintDetailedMap(
class VmaJsonWriter& json);
5387 void MakePoolAllocationsLost(
5388 uint32_t currentFrameIndex,
5389 size_t* pLostAllocationCount);
5390 VkResult CheckCorruption();
5392 VmaDefragmentator* EnsureDefragmentator(
5394 uint32_t currentFrameIndex);
5396 VkResult Defragment(
5398 VkDeviceSize& maxBytesToMove,
5399 uint32_t& maxAllocationsToMove);
5401 void DestroyDefragmentator();
5404 friend class VmaDefragmentator;
5407 const uint32_t m_MemoryTypeIndex;
5408 const VkDeviceSize m_PreferredBlockSize;
5409 const size_t m_MinBlockCount;
5410 const size_t m_MaxBlockCount;
5411 const VkDeviceSize m_BufferImageGranularity;
5412 const uint32_t m_FrameInUseCount;
5413 const bool m_IsCustomPool;
5414 const bool m_ExplicitBlockSize;
5415 const uint32_t m_Algorithm;
5416 bool m_HasEmptyBlock;
5419 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5423 VmaDefragmentator* m_pDefragmentator;
5424 uint32_t m_NextBlockId;
5426 VkDeviceSize CalcMaxBlockSize()
const;
5429 void Remove(VmaDeviceMemoryBlock* pBlock);
5433 void IncrementallySortBlocks();
5436 VkResult AllocateFromBlock(
5437 VmaDeviceMemoryBlock* pBlock,
5439 uint32_t currentFrameIndex,
5441 VkDeviceSize alignment,
5444 VmaSuballocationType suballocType,
5448 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5453 VMA_CLASS_NO_COPY(VmaPool_T)
5455 VmaBlockVector m_BlockVector;
5460 VkDeviceSize preferredBlockSize);
5463 uint32_t GetId()
const {
return m_Id; }
5464 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5466 #if VMA_STATS_STRING_ENABLED 5474 class VmaDefragmentator
5476 VMA_CLASS_NO_COPY(VmaDefragmentator)
5479 VmaBlockVector*
const m_pBlockVector;
5480 uint32_t m_CurrentFrameIndex;
5481 VkDeviceSize m_BytesMoved;
5482 uint32_t m_AllocationsMoved;
5484 struct AllocationInfo
5487 VkBool32* m_pChanged;
5490 m_hAllocation(VK_NULL_HANDLE),
5491 m_pChanged(VMA_NULL)
5496 struct AllocationInfoSizeGreater
5498 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5500 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5505 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5509 VmaDeviceMemoryBlock* m_pBlock;
5510 bool m_HasNonMovableAllocations;
5511 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5513 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5515 m_HasNonMovableAllocations(true),
5516 m_Allocations(pAllocationCallbacks),
5517 m_pMappedDataForDefragmentation(VMA_NULL)
5521 void CalcHasNonMovableAllocations()
5523 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5524 const size_t defragmentAllocCount = m_Allocations.size();
5525 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5528 void SortAllocationsBySizeDescecnding()
5530 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5533 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5538 void* m_pMappedDataForDefragmentation;
5541 struct BlockPointerLess
5543 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5545 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5547 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5549 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
5555 struct BlockInfoCompareMoveDestination
5557 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5559 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5563 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5567 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5575 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5576 BlockInfoVector m_Blocks;
5578 VkResult DefragmentRound(
5579 VkDeviceSize maxBytesToMove,
5580 uint32_t maxAllocationsToMove);
5582 static bool MoveMakesSense(
5583 size_t dstBlockIndex, VkDeviceSize dstOffset,
5584 size_t srcBlockIndex, VkDeviceSize srcOffset);
5589 VmaBlockVector* pBlockVector,
5590 uint32_t currentFrameIndex);
5592 ~VmaDefragmentator();
5594 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5595 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5597 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5599 VkResult Defragment(
5600 VkDeviceSize maxBytesToMove,
5601 uint32_t maxAllocationsToMove);
5604 #if VMA_RECORDING_ENABLED 5611 void WriteConfiguration(
5612 const VkPhysicalDeviceProperties& devProps,
5613 const VkPhysicalDeviceMemoryProperties& memProps,
5614 bool dedicatedAllocationExtensionEnabled);
5617 void RecordCreateAllocator(uint32_t frameIndex);
5618 void RecordDestroyAllocator(uint32_t frameIndex);
5619 void RecordCreatePool(uint32_t frameIndex,
5622 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5623 void RecordAllocateMemory(uint32_t frameIndex,
5624 const VkMemoryRequirements& vkMemReq,
5627 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5628 const VkMemoryRequirements& vkMemReq,
5629 bool requiresDedicatedAllocation,
5630 bool prefersDedicatedAllocation,
5633 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5634 const VkMemoryRequirements& vkMemReq,
5635 bool requiresDedicatedAllocation,
5636 bool prefersDedicatedAllocation,
5639 void RecordFreeMemory(uint32_t frameIndex,
5641 void RecordResizeAllocation(
5642 uint32_t frameIndex,
5644 VkDeviceSize newSize);
5645 void RecordSetAllocationUserData(uint32_t frameIndex,
5647 const void* pUserData);
5648 void RecordCreateLostAllocation(uint32_t frameIndex,
5650 void RecordMapMemory(uint32_t frameIndex,
5652 void RecordUnmapMemory(uint32_t frameIndex,
5654 void RecordFlushAllocation(uint32_t frameIndex,
5655 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5656 void RecordInvalidateAllocation(uint32_t frameIndex,
5657 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5658 void RecordCreateBuffer(uint32_t frameIndex,
5659 const VkBufferCreateInfo& bufCreateInfo,
5662 void RecordCreateImage(uint32_t frameIndex,
5663 const VkImageCreateInfo& imageCreateInfo,
5666 void RecordDestroyBuffer(uint32_t frameIndex,
5668 void RecordDestroyImage(uint32_t frameIndex,
5670 void RecordTouchAllocation(uint32_t frameIndex,
5672 void RecordGetAllocationInfo(uint32_t frameIndex,
5674 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
5684 class UserDataString
        const char* GetString() const { return m_Str; }
5698 VMA_MUTEX m_FileMutex;
5700 int64_t m_StartCounter;
5702 void GetBasicParams(CallParams& outParams);
#endif // #if VMA_RECORDING_ENABLED

// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
5714 bool m_UseKhrDedicatedAllocation;
5716 bool m_AllocationCallbacksSpecified;
5717 VkAllocationCallbacks m_AllocationCallbacks;
5721 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5722 VMA_MUTEX m_HeapSizeLimitMutex;
5724 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5725 VkPhysicalDeviceMemoryProperties m_MemProps;
5728 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
5731 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5732 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5733 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }
    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }
    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True if the memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in the given memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }
    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }
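    // Illustrative sketch (comment only): GetMemoryTypeMinAlignment() above matters when
    // flushing/invalidating non-coherent memory. A caller-side equivalent, assuming a
    // mapped HOST_VISIBLE but non-HOST_COHERENT type and hypothetical local variables,
    // rounds the range to nonCoherentAtomSize:
    //
    //   const VkDeviceSize atom = physDevProps.limits.nonCoherentAtomSize;
    //   VkMappedMemoryRange range = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
    //   range.memory = memory;
    //   range.offset = VmaAlignDown(offset, atom);
    //   range.size   = VmaAlignUp(size + (offset - range.offset), atom);
    //   vkFlushMappedMemoryRanges(device, 1, &range);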
#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif
5786 void GetBufferMemoryRequirements(
5788 VkMemoryRequirements& memReq,
5789 bool& requiresDedicatedAllocation,
5790 bool& prefersDedicatedAllocation)
const;
5791 void GetImageMemoryRequirements(
5793 VkMemoryRequirements& memReq,
5794 bool& requiresDedicatedAllocation,
5795 bool& prefersDedicatedAllocation)
const;
5798 VkResult AllocateMemory(
5799 const VkMemoryRequirements& vkMemReq,
5800 bool requiresDedicatedAllocation,
5801 bool prefersDedicatedAllocation,
5802 VkBuffer dedicatedBuffer,
5803 VkImage dedicatedImage,
5805 VmaSuballocationType suballocType,
5811 VkResult ResizeAllocation(
5813 VkDeviceSize newSize);
5815 void CalculateStats(
VmaStats* pStats);
5817 #if VMA_STATS_STRING_ENABLED 5818 void PrintDetailedMap(
class VmaJsonWriter& json);
5821 VkResult Defragment(
5823 size_t allocationCount,
5824 VkBool32* pAllocationsChanged,
5832 void DestroyPool(
VmaPool pool);
5835 void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }
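    // Illustrative usage sketch (comment only): the frame index feeds the lost-allocation
    // mechanism. A typical render loop advances it once per frame through the public
    // vmaSetCurrentFrameIndex() declared earlier in this header:
    //
    //   for(uint32_t frameIndex = 0; !quit; ++frameIndex)
    //   {
    //       vmaSetCurrentFrameIndex(allocator, frameIndex);
    //       // record and submit work for this frame...
    //   }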
5838 void MakePoolAllocationsLost(
5840 size_t* pLostAllocationCount);
5841 VkResult CheckPoolCorruption(
VmaPool hPool);
5842 VkResult CheckCorruption(uint32_t memoryTypeBits);
5846 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5847 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5852 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5853 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5855 void FlushOrInvalidateAllocation(
5857 VkDeviceSize offset, VkDeviceSize size,
5858 VMA_CACHE_OPERATION op);
5860 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5863 VkDeviceSize m_PreferredLargeHeapBlockSize;
5865 VkPhysicalDevice m_PhysicalDevice;
5866 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5868 VMA_MUTEX m_PoolsMutex;
5870 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5871 uint32_t m_NextPoolId;
5875 #if VMA_RECORDING_ENABLED 5876 VmaRecorder* m_pRecorder;
5881 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5883 VkResult AllocateMemoryOfType(
5885 VkDeviceSize alignment,
5886 bool dedicatedAllocation,
5887 VkBuffer dedicatedBuffer,
5888 VkImage dedicatedImage,
5890 uint32_t memTypeIndex,
5891 VmaSuballocationType suballocType,
5895 VkResult AllocateDedicatedMemory(
5897 VmaSuballocationType suballocType,
5898 uint32_t memTypeIndex,
5900 bool isUserDataString,
5902 VkBuffer dedicatedBuffer,
5903 VkImage dedicatedImage,
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    ptr->~T();
    VmaFree(hAllocator, ptr);
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
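// Minimal sketch (illustrative only, not used by the library): how the helpers above
// pair raw, aligned allocation with placement construction. "VmaExampleNewArrayDefault"
// is a hypothetical name; it assumes placement new (<new>) is available.
template<typename T>
static T* VmaExampleNewArrayDefault(VmaAllocator hAllocator, size_t count)
{
    T* const p = VmaAllocateArray<T>(hAllocator, count);
    for(size_t i = 0; i < count; ++i)
    {
        new(&p[i]) T(); // construct each element in the raw storage
    }
    return p; // later released with vma_delete_array(hAllocator, p, count)
}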
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};
void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    VmaUint32ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    VmaUint64ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}
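// Minimal usage sketch (illustrative only; "VmaExampleBuildLine" is a hypothetical
// helper, not part of the library): composing one line of text with the builder above.
static void VmaExampleBuildLine(VmaAllocator hAllocator, uint32_t value)
{
    VmaStringBuilder sb(hAllocator);
    sb.Add("Value = ");
    sb.AddNumber(value);
    sb.AddNewLine();
    // sb.GetData()/sb.GetLength() expose the result; note it is not null-terminated.
}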
#endif // #if VMA_STATS_STRING_ENABLED

#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
6022 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6025 void BeginObject(
bool singleLine =
false);
6028 void BeginArray(
bool singleLine =
false);
6031 void WriteString(
const char* pStr);
6032 void BeginString(
const char* pStr = VMA_NULL);
6033 void ContinueString(
const char* pStr);
6034 void ContinueString(uint32_t n);
6035 void ContinueString(uint64_t n);
6036 void ContinueString_Pointer(
const void* ptr);
6037 void EndString(
const char* pStr = VMA_NULL);
6039 void WriteNumber(uint32_t n);
6040 void WriteNumber(uint64_t n);
6041 void WriteBool(
bool b);
6045 static const char*
const INDENT;
6047 enum COLLECTION_TYPE
6049 COLLECTION_TYPE_OBJECT,
6050 COLLECTION_TYPE_ARRAY,
6054 COLLECTION_TYPE type;
6055 uint32_t valueCount;
6056 bool singleLineMode;
6059 VmaStringBuilder& m_SB;
6060 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6061 bool m_InsideString;
6063 void BeginValue(
bool isString);
6064 void WriteIndent(
bool oneLess =
false);
6067 const char*
const VmaJsonWriter::INDENT =
" ";
6069 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6071 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6072 m_InsideString(false)
6076 VmaJsonWriter::~VmaJsonWriter()
6078 VMA_ASSERT(!m_InsideString);
6079 VMA_ASSERT(m_Stack.empty());
6082 void VmaJsonWriter::BeginObject(
bool singleLine)
6084 VMA_ASSERT(!m_InsideString);
6090 item.type = COLLECTION_TYPE_OBJECT;
6091 item.valueCount = 0;
6092 item.singleLineMode = singleLine;
6093 m_Stack.push_back(item);
6096 void VmaJsonWriter::EndObject()
6098 VMA_ASSERT(!m_InsideString);
6103 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
6107 void VmaJsonWriter::BeginArray(
bool singleLine)
6109 VMA_ASSERT(!m_InsideString);
6115 item.type = COLLECTION_TYPE_ARRAY;
6116 item.valueCount = 0;
6117 item.singleLineMode = singleLine;
6118 m_Stack.push_back(item);
6121 void VmaJsonWriter::EndArray()
6123 VMA_ASSERT(!m_InsideString);
6128 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
6132 void VmaJsonWriter::WriteString(
const char* pStr)
6138 void VmaJsonWriter::BeginString(
const char* pStr)
6140 VMA_ASSERT(!m_InsideString);
6144 m_InsideString =
true;
6145 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6147 ContinueString(pStr);
6151 void VmaJsonWriter::ContinueString(
const char* pStr)
6153 VMA_ASSERT(m_InsideString);
6155 const size_t strLen = strlen(pStr);
6156 for(
size_t i = 0; i < strLen; ++i)
6189 VMA_ASSERT(0 &&
"Character not currently supported.");
6195 void VmaJsonWriter::ContinueString(uint32_t n)
6197 VMA_ASSERT(m_InsideString);
6201 void VmaJsonWriter::ContinueString(uint64_t n)
6203 VMA_ASSERT(m_InsideString);
6207 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6209 VMA_ASSERT(m_InsideString);
6210 m_SB.AddPointer(ptr);
6213 void VmaJsonWriter::EndString(
const char* pStr)
6215 VMA_ASSERT(m_InsideString);
6216 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6218 ContinueString(pStr);
6221 m_InsideString =
false;
6224 void VmaJsonWriter::WriteNumber(uint32_t n)
6226 VMA_ASSERT(!m_InsideString);
6231 void VmaJsonWriter::WriteNumber(uint64_t n)
6233 VMA_ASSERT(!m_InsideString);
void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}
6245 void VmaJsonWriter::WriteNull()
6247 VMA_ASSERT(!m_InsideString);
6252 void VmaJsonWriter::BeginValue(
bool isString)
6254 if(!m_Stack.empty())
6256 StackItem& currItem = m_Stack.back();
6257 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6258 currItem.valueCount % 2 == 0)
6260 VMA_ASSERT(isString);
6263 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6264 currItem.valueCount % 2 != 0)
6268 else if(currItem.valueCount > 0)
6277 ++currItem.valueCount;
6281 void VmaJsonWriter::WriteIndent(
bool oneLess)
6283 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6287 size_t count = m_Stack.size();
6288 if(count > 0 && oneLess)
6292 for(
size_t i = 0; i < count; ++i)
#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            const char* const newStrSrc = (char*)pUserData;
            const size_t newStrLen = strlen(newStrSrc);
            char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
            memcpy(newStrDst, newStrSrc, newStrLen + 1);
            m_pUserData = newStrDst;
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}
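// Minimal caller-side sketch (illustrative only; "VmaExampleNameAllocation" is a
// hypothetical helper): attaching a user-data string. It assumes the allocation was
// created with VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT, so the code above
// copies the string and the caller's buffer need not outlive the call.
static void VmaExampleNameAllocation(VmaAllocator allocator, VmaAllocation allocation)
{
    char name[] = "MyTexture"; // hypothetical label
    vmaSetAllocationUserData(allocator, allocation, name);
}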
6326 void VmaAllocation_T::ChangeBlockAllocation(
6328 VmaDeviceMemoryBlock* block,
6329 VkDeviceSize offset)
6331 VMA_ASSERT(block != VMA_NULL);
6332 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6335 if(block != m_BlockAllocation.m_Block)
6337 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
6338 if(IsPersistentMap())
6340 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
6341 block->Map(hAllocator, mapRefCount, VMA_NULL);
6344 m_BlockAllocation.m_Block = block;
6345 m_BlockAllocation.m_Offset = offset;
6348 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
6350 VMA_ASSERT(newSize > 0);
VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK: return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED: return 0;
    default: VMA_ASSERT(0); return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK: return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED: return m_DedicatedAllocation.m_hMemory;
    default: VMA_ASSERT(0); return VK_NULL_HANDLE;
    }
}

uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK: return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
    case ALLOCATION_TYPE_DEDICATED: return m_DedicatedAllocation.m_MemoryTypeIndex;
    default: VMA_ASSERT(0); return UINT32_MAX;
    }
}
6396 void* VmaAllocation_T::GetMappedData()
const 6400 case ALLOCATION_TYPE_BLOCK:
6403 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
6404 VMA_ASSERT(pBlockData != VMA_NULL);
6405 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
6412 case ALLOCATION_TYPE_DEDICATED:
6413 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6414 return m_DedicatedAllocation.m_pMappedData;
6421 bool VmaAllocation_T::CanBecomeLost()
const 6425 case ALLOCATION_TYPE_BLOCK:
6426 return m_BlockAllocation.m_CanBecomeLost;
6427 case ALLOCATION_TYPE_DEDICATED:
6435 VmaPool VmaAllocation_T::GetPool()
const 6437 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6438 return m_BlockAllocation.m_hPool;
6441 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6443 VMA_ASSERT(CanBecomeLost());
6449 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6452 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6457 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6463 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
6473 #if VMA_STATS_STRING_ENABLED 6476 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
6485 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6487 json.WriteString(
"Type");
6488 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6490 json.WriteString(
"Size");
6491 json.WriteNumber(m_Size);
6493 if(m_pUserData != VMA_NULL)
6495 json.WriteString(
"UserData");
6496 if(IsUserDataString())
6498 json.WriteString((
const char*)m_pUserData);
6503 json.ContinueString_Pointer(m_pUserData);
6508 json.WriteString(
"CreationFrameIndex");
6509 json.WriteNumber(m_CreationFrameIndex);
6511 json.WriteString(
"LastUseFrameIndex");
6512 json.WriteNumber(GetLastUseFrameIndex());
6514 if(m_BufferImageUsage != 0)
6516 json.WriteString(
"Usage");
6517 json.WriteNumber(m_BufferImageUsage);
void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    if(m_pUserData != VMA_NULL)
    {
        char* const oldStr = (char*)m_pUserData;
        const size_t oldStrLen = strlen(oldStr);
        vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
        m_pUserData = VMA_NULL;
    }
}
6535 void VmaAllocation_T::BlockAllocMap()
6537 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6539 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6545 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
6549 void VmaAllocation_T::BlockAllocUnmap()
6551 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6553 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6559 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
6563 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6565 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6569 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6571 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6572 *ppData = m_DedicatedAllocation.m_pMappedData;
6578 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6579 return VK_ERROR_MEMORY_MAP_FAILED;
6584 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6585 hAllocator->m_hDevice,
6586 m_DedicatedAllocation.m_hMemory,
6591 if(result == VK_SUCCESS)
6593 m_DedicatedAllocation.m_pMappedData = *ppData;
6600 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6602 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6604 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6609 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6610 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6611 hAllocator->m_hDevice,
6612 m_DedicatedAllocation.m_hMemory);
6617 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
6621 #if VMA_STATS_STRING_ENABLED 6623 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6627 json.WriteString(
"Blocks");
6630 json.WriteString(
"Allocations");
6633 json.WriteString(
"UnusedRanges");
6636 json.WriteString(
"UsedBytes");
6639 json.WriteString(
"UnusedBytes");
6644 json.WriteString(
"AllocationSize");
6645 json.BeginObject(
true);
6646 json.WriteString(
"Min");
6648 json.WriteString(
"Avg");
6650 json.WriteString(
"Max");
6657 json.WriteString(
"UnusedRangeSize");
6658 json.BeginObject(
true);
6659 json.WriteString(
"Min");
6661 json.WriteString(
"Avg");
6663 json.WriteString(
"Max");
#endif // #if VMA_STATS_STRING_ENABLED

struct VmaSuballocationItemSizeLess
{
    bool operator()(const VmaSuballocationList::iterator lhs, const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(const VmaSuballocationList::iterator lhs, VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
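// Minimal sketch (illustrative only; "VmaExampleFirstNotLessIndex" is a hypothetical
// helper): the heterogeneous comparator above enables best-fit lookup in a size-sorted
// vector, in the spirit of VmaBinaryFindFirstNotLess used by the generic metadata below.
// The same "first element not less than the requested size" search over plain sizes:
static size_t VmaExampleFirstNotLessIndex(const VkDeviceSize* sizes, size_t count, VkDeviceSize wanted)
{
    size_t lo = 0, hi = count;
    while(lo < hi)
    {
        const size_t mid = lo + (hi - lo) / 2;
        if(sizes[mid] < wanted)
            lo = mid + 1; // everything up to mid is too small
        else
            hi = mid;     // sizes[mid] is a candidate
    }
    return lo; // index of the first element >= wanted, or count if none
}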
6693 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
6695 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
6699 #if VMA_STATS_STRING_ENABLED 6701 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6702 VkDeviceSize unusedBytes,
6703 size_t allocationCount,
6704 size_t unusedRangeCount)
const 6708 json.WriteString(
"TotalBytes");
6709 json.WriteNumber(GetSize());
6711 json.WriteString(
"UnusedBytes");
6712 json.WriteNumber(unusedBytes);
6714 json.WriteString(
"Allocations");
6715 json.WriteNumber((uint64_t)allocationCount);
6717 json.WriteString(
"UnusedRanges");
6718 json.WriteNumber((uint64_t)unusedRangeCount);
6720 json.WriteString(
"Suballocations");
6724 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6725 VkDeviceSize offset,
6728 json.BeginObject(
true);
6730 json.WriteString(
"Offset");
6731 json.WriteNumber(offset);
6733 hAllocation->PrintParameters(json);
6738 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6739 VkDeviceSize offset,
6740 VkDeviceSize size)
const 6742 json.BeginObject(
true);
6744 json.WriteString(
"Offset");
6745 json.WriteNumber(offset);
6747 json.WriteString(
"Type");
6748 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6750 json.WriteString(
"Size");
6751 json.WriteNumber(size);
6756 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 6762 #endif // #if VMA_STATS_STRING_ENABLED 6767 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6768 VmaBlockMetadata(hAllocator),
6771 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6772 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6776 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
6780 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6782 VmaBlockMetadata::Init(size);
6785 m_SumFreeSize = size;
6787 VmaSuballocation suballoc = {};
6788 suballoc.offset = 0;
6789 suballoc.size = size;
6790 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6791 suballoc.hAllocation = VK_NULL_HANDLE;
6793 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
6794 m_Suballocations.push_back(suballoc);
6795 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6797 m_FreeSuballocationsBySize.push_back(suballocItem);
6800 bool VmaBlockMetadata_Generic::Validate()
const 6802 VMA_VALIDATE(!m_Suballocations.empty());
6805 VkDeviceSize calculatedOffset = 0;
6807 uint32_t calculatedFreeCount = 0;
6809 VkDeviceSize calculatedSumFreeSize = 0;
6812 size_t freeSuballocationsToRegister = 0;
6814 bool prevFree =
false;
6816 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6817 suballocItem != m_Suballocations.cend();
6820 const VmaSuballocation& subAlloc = *suballocItem;
6823 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6825 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
6827 VMA_VALIDATE(!prevFree || !currFree);
6829 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
6833 calculatedSumFreeSize += subAlloc.size;
6834 ++calculatedFreeCount;
6835 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6837 ++freeSuballocationsToRegister;
6841 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
6845 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
6846 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
6849 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
6852 calculatedOffset += subAlloc.size;
6853 prevFree = currFree;
6858 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
6860 VkDeviceSize lastSize = 0;
6861 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6863 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6866 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6868 VMA_VALIDATE(suballocItem->size >= lastSize);
6870 lastSize = suballocItem->size;
6874 VMA_VALIDATE(ValidateFreeSuballocationList());
6875 VMA_VALIDATE(calculatedOffset == GetSize());
6876 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6877 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
6882 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6884 if(!m_FreeSuballocationsBySize.empty())
6886 return m_FreeSuballocationsBySize.back()->size;
6894 bool VmaBlockMetadata_Generic::IsEmpty()
const 6896 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
6899 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6903 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6915 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6916 suballocItem != m_Suballocations.cend();
6919 const VmaSuballocation& suballoc = *suballocItem;
6920 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
6933 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6935 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6937 inoutStats.
size += GetSize();
6944 #if VMA_STATS_STRING_ENABLED 6946 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6948 PrintDetailedMap_Begin(json,
6950 m_Suballocations.size() - (size_t)m_FreeCount,
6954 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6955 suballocItem != m_Suballocations.cend();
6956 ++suballocItem, ++i)
6958 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6960 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6964 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6968 PrintDetailedMap_End(json);
6971 #endif // #if VMA_STATS_STRING_ENABLED 6973 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6974 uint32_t currentFrameIndex,
6975 uint32_t frameInUseCount,
6976 VkDeviceSize bufferImageGranularity,
6977 VkDeviceSize allocSize,
6978 VkDeviceSize allocAlignment,
6980 VmaSuballocationType allocType,
6981 bool canMakeOtherLost,
6983 VmaAllocationRequest* pAllocationRequest)
6985 VMA_ASSERT(allocSize > 0);
6986 VMA_ASSERT(!upperAddress);
6987 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6988 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6989 VMA_HEAVY_ASSERT(Validate());
6992 if(canMakeOtherLost ==
false &&
6993 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6999 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7000 if(freeSuballocCount > 0)
7005 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7006 m_FreeSuballocationsBySize.data(),
7007 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7008 allocSize + 2 * VMA_DEBUG_MARGIN,
7009 VmaSuballocationItemSizeLess());
7010 size_t index = it - m_FreeSuballocationsBySize.data();
7011 for(; index < freeSuballocCount; ++index)
7016 bufferImageGranularity,
7020 m_FreeSuballocationsBySize[index],
7022 &pAllocationRequest->offset,
7023 &pAllocationRequest->itemsToMakeLostCount,
7024 &pAllocationRequest->sumFreeSize,
7025 &pAllocationRequest->sumItemSize))
7027 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7035 for(
size_t index = freeSuballocCount; index--; )
7040 bufferImageGranularity,
7044 m_FreeSuballocationsBySize[index],
7046 &pAllocationRequest->offset,
7047 &pAllocationRequest->itemsToMakeLostCount,
7048 &pAllocationRequest->sumFreeSize,
7049 &pAllocationRequest->sumItemSize))
7051 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7058 if(canMakeOtherLost)
7062 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7063 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7065 VmaAllocationRequest tmpAllocRequest = {};
7066 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7067 suballocIt != m_Suballocations.end();
7070 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7071 suballocIt->hAllocation->CanBecomeLost())
7076 bufferImageGranularity,
7082 &tmpAllocRequest.offset,
7083 &tmpAllocRequest.itemsToMakeLostCount,
7084 &tmpAllocRequest.sumFreeSize,
7085 &tmpAllocRequest.sumItemSize))
7087 tmpAllocRequest.item = suballocIt;
7089 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7092 *pAllocationRequest = tmpAllocRequest;
7098 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
7107 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7108 uint32_t currentFrameIndex,
7109 uint32_t frameInUseCount,
7110 VmaAllocationRequest* pAllocationRequest)
7112 while(pAllocationRequest->itemsToMakeLostCount > 0)
7114 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7116 ++pAllocationRequest->item;
7118 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7119 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7120 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7121 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7123 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7124 --pAllocationRequest->itemsToMakeLostCount;
7132 VMA_HEAVY_ASSERT(Validate());
7133 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7134 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
7139 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7141 uint32_t lostAllocationCount = 0;
7142 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7143 it != m_Suballocations.end();
7146 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7147 it->hAllocation->CanBecomeLost() &&
7148 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7150 it = FreeSuballocation(it);
7151 ++lostAllocationCount;
7154 return lostAllocationCount;
7157 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7159 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7160 it != m_Suballocations.end();
7163 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7165 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7167 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7168 return VK_ERROR_VALIDATION_FAILED_EXT;
7170 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7172 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7173 return VK_ERROR_VALIDATION_FAILED_EXT;
7181 void VmaBlockMetadata_Generic::Alloc(
7182 const VmaAllocationRequest& request,
7183 VmaSuballocationType type,
7184 VkDeviceSize allocSize,
7188 VMA_ASSERT(!upperAddress);
7189 VMA_ASSERT(request.item != m_Suballocations.end());
7190 VmaSuballocation& suballoc = *request.item;
7192 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7194 VMA_ASSERT(request.offset >= suballoc.offset);
7195 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7196 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7197 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
7201 UnregisterFreeSuballocation(request.item);
7203 suballoc.offset = request.offset;
7204 suballoc.size = allocSize;
7205 suballoc.type = type;
7206 suballoc.hAllocation = hAllocation;
7211 VmaSuballocation paddingSuballoc = {};
7212 paddingSuballoc.offset = request.offset + allocSize;
7213 paddingSuballoc.size = paddingEnd;
7214 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7215 VmaSuballocationList::iterator next = request.item;
7217 const VmaSuballocationList::iterator paddingEndItem =
7218 m_Suballocations.insert(next, paddingSuballoc);
7219 RegisterFreeSuballocation(paddingEndItem);
7225 VmaSuballocation paddingSuballoc = {};
7226 paddingSuballoc.offset = request.offset - paddingBegin;
7227 paddingSuballoc.size = paddingBegin;
7228 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7229 const VmaSuballocationList::iterator paddingBeginItem =
7230 m_Suballocations.insert(request.item, paddingSuballoc);
7231 RegisterFreeSuballocation(paddingBeginItem);
7235 m_FreeCount = m_FreeCount - 1;
7236 if(paddingBegin > 0)
7244 m_SumFreeSize -= allocSize;
7247 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7249 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7250 suballocItem != m_Suballocations.end();
7253 VmaSuballocation& suballoc = *suballocItem;
7254 if(suballoc.hAllocation == allocation)
7256 FreeSuballocation(suballocItem);
7257 VMA_HEAVY_ASSERT(Validate());
7261 VMA_ASSERT(0 &&
"Not found!");
7264 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7266 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7267 suballocItem != m_Suballocations.end();
7270 VmaSuballocation& suballoc = *suballocItem;
7271 if(suballoc.offset == offset)
7273 FreeSuballocation(suballocItem);
7277 VMA_ASSERT(0 &&
"Not found!");
7280 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
7282 typedef VmaSuballocationList::iterator iter_type;
7283 for(iter_type suballocItem = m_Suballocations.begin();
7284 suballocItem != m_Suballocations.end();
7287 VmaSuballocation& suballoc = *suballocItem;
7288 if(suballoc.hAllocation == alloc)
7290 iter_type nextItem = suballocItem;
7294 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
7297 if(newSize < alloc->GetSize())
7299 const VkDeviceSize sizeDiff = suballoc.size - newSize;
7302 if(nextItem != m_Suballocations.end())
7305 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7308 UnregisterFreeSuballocation(nextItem);
7309 nextItem->offset -= sizeDiff;
7310 nextItem->size += sizeDiff;
7311 RegisterFreeSuballocation(nextItem);
7317 VmaSuballocation newFreeSuballoc;
7318 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
7319 newFreeSuballoc.offset = suballoc.offset + newSize;
7320 newFreeSuballoc.size = sizeDiff;
7321 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7322 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
7323 RegisterFreeSuballocation(newFreeSuballocIt);
7332 VmaSuballocation newFreeSuballoc;
7333 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
7334 newFreeSuballoc.offset = suballoc.offset + newSize;
7335 newFreeSuballoc.size = sizeDiff;
7336 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7337 m_Suballocations.push_back(newFreeSuballoc);
7339 iter_type newFreeSuballocIt = m_Suballocations.end();
7340 RegisterFreeSuballocation(--newFreeSuballocIt);
7345 suballoc.size = newSize;
7346 m_SumFreeSize += sizeDiff;
7351 const VkDeviceSize sizeDiff = newSize - suballoc.size;
7354 if(nextItem != m_Suballocations.end())
7357 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7360 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
7366 if(nextItem->size > sizeDiff)
7369 UnregisterFreeSuballocation(nextItem);
7370 nextItem->offset += sizeDiff;
7371 nextItem->size -= sizeDiff;
7372 RegisterFreeSuballocation(nextItem);
7378 UnregisterFreeSuballocation(nextItem);
7379 m_Suballocations.erase(nextItem);
7395 suballoc.size = newSize;
7396 m_SumFreeSize -= sizeDiff;
7403 VMA_ASSERT(0 &&
"Not found!");
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
7422 bool VmaBlockMetadata_Generic::CheckAllocation(
7423 uint32_t currentFrameIndex,
7424 uint32_t frameInUseCount,
7425 VkDeviceSize bufferImageGranularity,
7426 VkDeviceSize allocSize,
7427 VkDeviceSize allocAlignment,
7428 VmaSuballocationType allocType,
7429 VmaSuballocationList::const_iterator suballocItem,
7430 bool canMakeOtherLost,
7431 VkDeviceSize* pOffset,
7432 size_t* itemsToMakeLostCount,
7433 VkDeviceSize* pSumFreeSize,
7434 VkDeviceSize* pSumItemSize)
const 7436 VMA_ASSERT(allocSize > 0);
7437 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7438 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7439 VMA_ASSERT(pOffset != VMA_NULL);
7441 *itemsToMakeLostCount = 0;
7445 if(canMakeOtherLost)
7447 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7449 *pSumFreeSize = suballocItem->size;
7453 if(suballocItem->hAllocation->CanBecomeLost() &&
7454 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7456 ++*itemsToMakeLostCount;
7457 *pSumItemSize = suballocItem->size;
7466 if(GetSize() - suballocItem->offset < allocSize)
7472 *pOffset = suballocItem->offset;
7475 if(VMA_DEBUG_MARGIN > 0)
7477 *pOffset += VMA_DEBUG_MARGIN;
7481 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7485 if(bufferImageGranularity > 1)
7487 bool bufferImageGranularityConflict =
false;
7488 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7489 while(prevSuballocItem != m_Suballocations.cbegin())
7492 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7493 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7495 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7497 bufferImageGranularityConflict =
true;
7505 if(bufferImageGranularityConflict)
7507 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7513 if(*pOffset >= suballocItem->offset + suballocItem->size)
7519 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7522 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7524 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7526 if(suballocItem->offset + totalSize > GetSize())
7533 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7534 if(totalSize > suballocItem->size)
7536 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7537 while(remainingSize > 0)
7540 if(lastSuballocItem == m_Suballocations.cend())
7544 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7546 *pSumFreeSize += lastSuballocItem->size;
7550 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7551 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7552 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7554 ++*itemsToMakeLostCount;
7555 *pSumItemSize += lastSuballocItem->size;
7562 remainingSize = (lastSuballocItem->size < remainingSize) ?
7563 remainingSize - lastSuballocItem->size : 0;
7569 if(bufferImageGranularity > 1)
7571 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7573 while(nextSuballocItem != m_Suballocations.cend())
7575 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7576 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7578 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7580 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7581 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7582 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7584 ++*itemsToMakeLostCount;
7603 const VmaSuballocation& suballoc = *suballocItem;
7604 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7606 *pSumFreeSize = suballoc.size;
7609 if(suballoc.size < allocSize)
7615 *pOffset = suballoc.offset;
7618 if(VMA_DEBUG_MARGIN > 0)
7620 *pOffset += VMA_DEBUG_MARGIN;
7624 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7628 if(bufferImageGranularity > 1)
7630 bool bufferImageGranularityConflict =
false;
7631 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7632 while(prevSuballocItem != m_Suballocations.cbegin())
7635 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7636 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7638 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7640 bufferImageGranularityConflict =
true;
7648 if(bufferImageGranularityConflict)
7650 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7655 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7658 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7661 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
7668 if(bufferImageGranularity > 1)
7670 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7672 while(nextSuballocItem != m_Suballocations.cend())
7674 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7675 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7677 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}
7711 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7714 VmaSuballocation& suballoc = *suballocItem;
7715 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7716 suballoc.hAllocation = VK_NULL_HANDLE;
7720 m_SumFreeSize += suballoc.size;
7723 bool mergeWithNext =
false;
7724 bool mergeWithPrev =
false;
7726 VmaSuballocationList::iterator nextItem = suballocItem;
7728 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7730 mergeWithNext =
true;
7733 VmaSuballocationList::iterator prevItem = suballocItem;
7734 if(suballocItem != m_Suballocations.begin())
7737 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7739 mergeWithPrev =
true;
7745 UnregisterFreeSuballocation(nextItem);
7746 MergeFreeWithNext(suballocItem);
7751 UnregisterFreeSuballocation(prevItem);
7752 MergeFreeWithNext(prevItem);
7753 RegisterFreeSuballocation(prevItem);
7758 RegisterFreeSuballocation(suballocItem);
7759 return suballocItem;
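// Illustrative sketch (comment only, hypothetical types): FreeSuballocation() above is
// classic free-list coalescing - a freed range merges with a free neighbour on either
// side so adjacent free ranges never coexist. The same idea on a plain std::list:
//
//   struct ExampleRange { VkDeviceSize offset, size; bool free; };
//   static void ExampleCoalesce(std::list<ExampleRange>& l, std::list<ExampleRange>::iterator it)
//   {
//       it->free = true;
//       std::list<ExampleRange>::iterator next = it; ++next;
//       if(next != l.end() && next->free) { it->size += next->size; l.erase(next); }
//       if(it != l.begin())
//       {
//           std::list<ExampleRange>::iterator prev = it; --prev;
//           if(prev->free) { prev->size += it->size; l.erase(it); }
//       }
//   }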
7763 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7765 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7766 VMA_ASSERT(item->size > 0);
7770 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7772 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7774 if(m_FreeSuballocationsBySize.empty())
7776 m_FreeSuballocationsBySize.push_back(item);
7780 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
7788 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7790 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7791 VMA_ASSERT(item->size > 0);
7795 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7797 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7799 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7800 m_FreeSuballocationsBySize.data(),
7801 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7803 VmaSuballocationItemSizeLess());
7804 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7805 index < m_FreeSuballocationsBySize.size();
7808 if(m_FreeSuballocationsBySize[index] == item)
7810 VmaVectorRemove(m_FreeSuballocationsBySize, index);
7813 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7815 VMA_ASSERT(0 &&
"Not found.");
7824 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7825 VmaBlockMetadata(hAllocator),
7827 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7828 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7829 m_1stVectorIndex(0),
7830 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7831 m_1stNullItemsBeginCount(0),
7832 m_1stNullItemsMiddleCount(0),
7833 m_2ndNullItemsCount(0)
7837 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
7841 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7843 VmaBlockMetadata::Init(size);
7844 m_SumFreeSize = size;
7847 bool VmaBlockMetadata_Linear::Validate()
const 7849 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7850 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7852 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7853 VMA_VALIDATE(!suballocations1st.empty() ||
7854 suballocations2nd.empty() ||
7855 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
7857 if(!suballocations1st.empty())
7860 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
7862 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
7864 if(!suballocations2nd.empty())
7867 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
7870 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7871 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7873 VkDeviceSize sumUsedSize = 0;
7874 const size_t suballoc1stCount = suballocations1st.size();
7875 VkDeviceSize offset = VMA_DEBUG_MARGIN;
7877 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7879 const size_t suballoc2ndCount = suballocations2nd.size();
7880 size_t nullItem2ndCount = 0;
7881 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7883 const VmaSuballocation& suballoc = suballocations2nd[i];
7884 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7886 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7887 VMA_VALIDATE(suballoc.offset >= offset);
7891 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7892 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7893 sumUsedSize += suballoc.size;
7900 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7903 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
7906 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7908 const VmaSuballocation& suballoc = suballocations1st[i];
7909 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7910 suballoc.hAllocation == VK_NULL_HANDLE);
7913 size_t nullItem1stCount = m_1stNullItemsBeginCount;
7915 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7917 const VmaSuballocation& suballoc = suballocations1st[i];
7918 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7920 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7921 VMA_VALIDATE(suballoc.offset >= offset);
7922 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7926 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7927 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7928 sumUsedSize += suballoc.size;
7935 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7937 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
7939 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7941 const size_t suballoc2ndCount = suballocations2nd.size();
7942 size_t nullItem2ndCount = 0;
7943 for(
size_t i = suballoc2ndCount; i--; )
7945 const VmaSuballocation& suballoc = suballocations2nd[i];
7946 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7948 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7949 VMA_VALIDATE(suballoc.offset >= offset);
7953 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7954 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7955 sumUsedSize += suballoc.size;
7962 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7965 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
7968 VMA_VALIDATE(offset <= GetSize());
7969 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
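// Illustrative usage sketch (comment only): this linear metadata backs custom pools
// created with the linear algorithm. Assuming the public pool API declared earlier in
// this header, such a pool is requested roughly like this:
//
//   VmaPoolCreateInfo poolInfo = {};
//   poolInfo.memoryTypeIndex = memTypeIndex; // e.g. from vmaFindMemoryTypeIndex()
//   poolInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
//   poolInfo.blockSize = 64ull * 1024 * 1024;
//   poolInfo.maxBlockCount = 1; // the linear algorithm works within a single block
//   VmaPool pool;
//   VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);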
7974 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7976 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7977 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
7980 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7982 const VkDeviceSize size = GetSize();
7994 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7996 switch(m_2ndVectorMode)
7998 case SECOND_VECTOR_EMPTY:
8004 const size_t suballocations1stCount = suballocations1st.size();
8005 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8006 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8007 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8009 firstSuballoc.offset,
8010 size - (lastSuballoc.offset + lastSuballoc.size));
8014 case SECOND_VECTOR_RING_BUFFER:
8019 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8020 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8021 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8022 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
8026 case SECOND_VECTOR_DOUBLE_STACK:
8031 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8032 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
8033 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
8034 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
8044 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 8046 const VkDeviceSize size = GetSize();
8047 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8048 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8049 const size_t suballoc1stCount = suballocations1st.size();
8050 const size_t suballoc2ndCount = suballocations2nd.size();
8061 VkDeviceSize lastOffset = 0;
8063 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8065 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8066 size_t nextAlloc2ndIndex = 0;
8067 while(lastOffset < freeSpace2ndTo1stEnd)
8070 while(nextAlloc2ndIndex < suballoc2ndCount &&
8071 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8073 ++nextAlloc2ndIndex;
8077 if(nextAlloc2ndIndex < suballoc2ndCount)
8079 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8082 if(lastOffset < suballoc.offset)
8085 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8099 lastOffset = suballoc.offset + suballoc.size;
8100 ++nextAlloc2ndIndex;
8106 if(lastOffset < freeSpace2ndTo1stEnd)
8108 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8116 lastOffset = freeSpace2ndTo1stEnd;
8121 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8122 const VkDeviceSize freeSpace1stTo2ndEnd =
8123 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8124 while(lastOffset < freeSpace1stTo2ndEnd)
8127 while(nextAlloc1stIndex < suballoc1stCount &&
8128 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8130 ++nextAlloc1stIndex;
8134 if(nextAlloc1stIndex < suballoc1stCount)
8136 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8139 if(lastOffset < suballoc.offset)
8142 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8156 lastOffset = suballoc.offset + suballoc.size;
8157 ++nextAlloc1stIndex;
8163 if(lastOffset < freeSpace1stTo2ndEnd)
8165 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8173 lastOffset = freeSpace1stTo2ndEnd;
8177 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8179 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8180 while(lastOffset < size)
8183 while(nextAlloc2ndIndex != SIZE_MAX &&
8184 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8186 --nextAlloc2ndIndex;
8190 if(nextAlloc2ndIndex != SIZE_MAX)
8192 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8195 if(lastOffset < suballoc.offset)
8198 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8212 lastOffset = suballoc.offset + suballoc.size;
8213 --nextAlloc2ndIndex;
8219 if(lastOffset < size)
8221 const VkDeviceSize unusedRangeSize = size - lastOffset;
8237 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8239 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8240 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8241 const VkDeviceSize size = GetSize();
8242 const size_t suballoc1stCount = suballocations1st.size();
8243 const size_t suballoc2ndCount = suballocations2nd.size();
8245 inoutStats.
size += size;
8247 VkDeviceSize lastOffset = 0;
8249 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8251 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8252 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8253 while(lastOffset < freeSpace2ndTo1stEnd)
8256 while(nextAlloc2ndIndex < suballoc2ndCount &&
8257 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8259 ++nextAlloc2ndIndex;
8263 if(nextAlloc2ndIndex < suballoc2ndCount)
8265 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8268 if(lastOffset < suballoc.offset)
8271 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8282 lastOffset = suballoc.offset + suballoc.size;
8283 ++nextAlloc2ndIndex;
8288 if(lastOffset < freeSpace2ndTo1stEnd)
8291 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8298 lastOffset = freeSpace2ndTo1stEnd;
8303 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8304 const VkDeviceSize freeSpace1stTo2ndEnd =
8305 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8306 while(lastOffset < freeSpace1stTo2ndEnd)
8309 while(nextAlloc1stIndex < suballoc1stCount &&
8310 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8312 ++nextAlloc1stIndex;
8316 if(nextAlloc1stIndex < suballoc1stCount)
8318 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8321 if(lastOffset < suballoc.offset)
8324 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8335 lastOffset = suballoc.offset + suballoc.size;
8336 ++nextAlloc1stIndex;
8341 if(lastOffset < freeSpace1stTo2ndEnd)
8344 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8351 lastOffset = freeSpace1stTo2ndEnd;
8355 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8357 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8358 while(lastOffset < size)
8361 while(nextAlloc2ndIndex != SIZE_MAX &&
8362 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8364 --nextAlloc2ndIndex;
8368 if(nextAlloc2ndIndex != SIZE_MAX)
8370 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8373 if(lastOffset < suballoc.offset)
8376 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8387 lastOffset = suballoc.offset + suballoc.size;
8388 --nextAlloc2ndIndex;
8393 if(lastOffset < size)
8396 const VkDeviceSize unusedRangeSize = size - lastOffset;
8409 #if VMA_STATS_STRING_ENABLED 8410 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8412 const VkDeviceSize size = GetSize();
8413 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8414 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8415 const size_t suballoc1stCount = suballocations1st.size();
8416 const size_t suballoc2ndCount = suballocations2nd.size();
8420 size_t unusedRangeCount = 0;
8421 VkDeviceSize usedBytes = 0;
8423 VkDeviceSize lastOffset = 0;
8425 size_t alloc2ndCount = 0;
8426 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8428 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8429 size_t nextAlloc2ndIndex = 0;
8430 while(lastOffset < freeSpace2ndTo1stEnd)
8433 while(nextAlloc2ndIndex < suballoc2ndCount &&
8434 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8436 ++nextAlloc2ndIndex;
8440 if(nextAlloc2ndIndex < suballoc2ndCount)
8442 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8445 if(lastOffset < suballoc.offset)
8454 usedBytes += suballoc.size;
8457 lastOffset = suballoc.offset + suballoc.size;
8458 ++nextAlloc2ndIndex;
8463 if(lastOffset < freeSpace2ndTo1stEnd)
8470 lastOffset = freeSpace2ndTo1stEnd;
8475 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8476 size_t alloc1stCount = 0;
8477 const VkDeviceSize freeSpace1stTo2ndEnd =
8478 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8479 while(lastOffset < freeSpace1stTo2ndEnd)
8482 while(nextAlloc1stIndex < suballoc1stCount &&
8483 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8485 ++nextAlloc1stIndex;
8489 if(nextAlloc1stIndex < suballoc1stCount)
8491 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8494 if(lastOffset < suballoc.offset)
8503 usedBytes += suballoc.size;
8506 lastOffset = suballoc.offset + suballoc.size;
8507 ++nextAlloc1stIndex;
8512 if(lastOffset < size)
8519 lastOffset = freeSpace1stTo2ndEnd;
8523 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8525 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8526 while(lastOffset < size)
8529 while(nextAlloc2ndIndex != SIZE_MAX &&
8530 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8532 --nextAlloc2ndIndex;
8536 if(nextAlloc2ndIndex != SIZE_MAX)
8538 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8541 if(lastOffset < suballoc.offset)
8550 usedBytes += suballoc.size;
8553 lastOffset = suballoc.offset + suballoc.size;
8554 --nextAlloc2ndIndex;
8559 if(lastOffset < size)
8571 const VkDeviceSize unusedBytes = size - usedBytes;
8572 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
8577 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8579 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8580 size_t nextAlloc2ndIndex = 0;
8581 while(lastOffset < freeSpace2ndTo1stEnd)
8584 while(nextAlloc2ndIndex < suballoc2ndCount &&
8585 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8587 ++nextAlloc2ndIndex;
8591 if(nextAlloc2ndIndex < suballoc2ndCount)
8593 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8596 if(lastOffset < suballoc.offset)
8599 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8600 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8605 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8608 lastOffset = suballoc.offset + suballoc.size;
8609 ++nextAlloc2ndIndex;
8614 if(lastOffset < freeSpace2ndTo1stEnd)
8617 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8618 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8622 lastOffset = freeSpace2ndTo1stEnd;
8627 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8628 while(lastOffset < freeSpace1stTo2ndEnd)
8631 while(nextAlloc1stIndex < suballoc1stCount &&
8632 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8634 ++nextAlloc1stIndex;
8638 if(nextAlloc1stIndex < suballoc1stCount)
8640 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8643 if(lastOffset < suballoc.offset)
8646 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8647 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8652 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8655 lastOffset = suballoc.offset + suballoc.size;
8656 ++nextAlloc1stIndex;
8661 if(lastOffset < freeSpace1stTo2ndEnd)
8664 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8665 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8669 lastOffset = freeSpace1stTo2ndEnd;
8673 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8675 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8676 while(lastOffset < size)
8679 while(nextAlloc2ndIndex != SIZE_MAX &&
8680 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8682 --nextAlloc2ndIndex;
8686 if(nextAlloc2ndIndex != SIZE_MAX)
8688 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8691 if(lastOffset < suballoc.offset)
8694 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8695 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8700 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8703 lastOffset = suballoc.offset + suballoc.size;
8704 --nextAlloc2ndIndex;
8709 if(lastOffset < size)
8712 const VkDeviceSize unusedRangeSize = size - lastOffset;
8713 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8722 PrintDetailedMap_End(json);
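// Illustrative usage sketch, not part of the library: the PrintDetailedMap() implementation
// above is what ultimately feeds the JSON string returned by the public
// vmaBuildStatsString()/vmaFreeStatsString() pair declared earlier in this header.
// The helper name below is hypothetical; it assumes a valid VmaAllocator.
static void VmaExample_DumpDetailedStats(VmaAllocator allocator)
{
    char* statsJson = VMA_NULL;
    vmaBuildStatsString(allocator, &statsJson, VK_TRUE); // VK_TRUE => include the detailed map
    if(statsJson != VMA_NULL)
    {
        VMA_DEBUG_LOG("%s", statsJson); // or hand the string to your own logger / file writer
        vmaFreeStatsString(allocator, statsJson);
    }
}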
8724 #endif // #if VMA_STATS_STRING_ENABLED
8726 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8727 uint32_t currentFrameIndex,
8728 uint32_t frameInUseCount,
8729 VkDeviceSize bufferImageGranularity,
8730 VkDeviceSize allocSize,
8731 VkDeviceSize allocAlignment,
8733 VmaSuballocationType allocType,
8734 bool canMakeOtherLost,
8736 VmaAllocationRequest* pAllocationRequest)
8738 VMA_ASSERT(allocSize > 0);
8739 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8740 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8741 VMA_HEAVY_ASSERT(Validate());
8743 const VkDeviceSize size = GetSize();
8744 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8745 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8749 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8751 VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8756 if(allocSize > size)
8760 VkDeviceSize resultBaseOffset = size - allocSize;
8761 if(!suballocations2nd.empty())
8763 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8764 resultBaseOffset = lastSuballoc.offset - allocSize;
8765 if(allocSize > lastSuballoc.offset)
8772 VkDeviceSize resultOffset = resultBaseOffset;
8775 if(VMA_DEBUG_MARGIN > 0)
8777 if(resultOffset < VMA_DEBUG_MARGIN)
8781 resultOffset -= VMA_DEBUG_MARGIN;
8785 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
8789 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8791 bool bufferImageGranularityConflict = false;
8792 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8794 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8795 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8797 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8799 bufferImageGranularityConflict = true;
8807 if(bufferImageGranularityConflict)
8809 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
8814 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8815 suballocations1st.back().offset + suballocations1st.back().size :
8817 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
8821 if(bufferImageGranularity > 1)
8823 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8825 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8826 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8828 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
8842 pAllocationRequest->offset = resultOffset;
8843 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8844 pAllocationRequest->sumItemSize = 0;
8846 pAllocationRequest->itemsToMakeLostCount = 0;
8852 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8856 VkDeviceSize resultBaseOffset = 0;
8857 if(!suballocations1st.empty())
8859 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8860 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8864 VkDeviceSize resultOffset = resultBaseOffset;
8867 if(VMA_DEBUG_MARGIN > 0)
8869 resultOffset += VMA_DEBUG_MARGIN;
8873 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8877 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8879 bool bufferImageGranularityConflict = false;
8880 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8882 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8883 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8885 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8887 bufferImageGranularityConflict = true;
8895 if(bufferImageGranularityConflict)
8897 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8901 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8902 suballocations2nd.back().offset : size;
8905 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
8909 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8911 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8913 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8914 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8916 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8930 pAllocationRequest->offset = resultOffset;
8931 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8932 pAllocationRequest->sumItemSize = 0;
8934 pAllocationRequest->itemsToMakeLostCount = 0;
8941 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8943 VMA_ASSERT(!suballocations1st.empty());
8945 VkDeviceSize resultBaseOffset = 0;
8946 if(!suballocations2nd.empty())
8948 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8949 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8953 VkDeviceSize resultOffset = resultBaseOffset;
8956 if(VMA_DEBUG_MARGIN > 0)
8958 resultOffset += VMA_DEBUG_MARGIN;
8962 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8966 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8968 bool bufferImageGranularityConflict = false;
8969 for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8971 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8972 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8974 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8976 bufferImageGranularityConflict = true;
8984 if(bufferImageGranularityConflict)
8986 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8990 pAllocationRequest->itemsToMakeLostCount = 0;
8991 pAllocationRequest->sumItemSize = 0;
8992 size_t index1st = m_1stNullItemsBeginCount;
8994 if(canMakeOtherLost)
8996 while(index1st < suballocations1st.size() &&
8997 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
9000 const VmaSuballocation& suballoc = suballocations1st[index1st];
9001 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9007 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9008 if(suballoc.hAllocation->CanBecomeLost() &&
9009 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9011 ++pAllocationRequest->itemsToMakeLostCount;
9012 pAllocationRequest->sumItemSize += suballoc.size;
9024 if(bufferImageGranularity > 1)
9026 while(index1st < suballocations1st.size())
9028 const VmaSuballocation& suballoc = suballocations1st[index1st];
9029 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
9031 if(suballoc.hAllocation != VK_NULL_HANDLE)
9034 if(suballoc.hAllocation->CanBecomeLost() &&
9035 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9037 ++pAllocationRequest->itemsToMakeLostCount;
9038 pAllocationRequest->sumItemSize += suballoc.size;
9057 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
9058 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
9062 if(bufferImageGranularity > 1)
9064 for(size_t nextSuballocIndex = index1st;
9065 nextSuballocIndex < suballocations1st.size();
9066 nextSuballocIndex++)
9068 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9069 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9071 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9085 pAllocationRequest->offset = resultOffset;
9086 pAllocationRequest->sumFreeSize =
9087 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
9089 - pAllocationRequest->sumItemSize;
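// Illustrative sketch, not the library's code: the candidate offsets computed above rely on
// VmaAlignUp()/VmaAlignDown() (defined earlier in this file). For a power-of-two alignment
// they are equivalent to the stand-alone helpers below. Upper-address (double stack) requests
// align the offset down after subtracting the size and VMA_DEBUG_MARGIN, while lower-address
// requests align it up after adding the margin.
static inline VkDeviceSize VmaExample_AlignUp(VkDeviceSize offset, VkDeviceSize alignment)
{
    return (offset + alignment - 1) & ~(alignment - 1); // assumes alignment is a power of two
}
static inline VkDeviceSize VmaExample_AlignDown(VkDeviceSize offset, VkDeviceSize alignment)
{
    return offset & ~(alignment - 1); // assumes alignment is a power of two
}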
9099 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
9100 uint32_t currentFrameIndex,
9101 uint32_t frameInUseCount,
9102 VmaAllocationRequest* pAllocationRequest)
9104 if(pAllocationRequest->itemsToMakeLostCount == 0)
9109 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
9111 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9112 size_t index1st = m_1stNullItemsBeginCount;
9113 size_t madeLostCount = 0;
9114 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
9116 VMA_ASSERT(index1st < suballocations1st.size());
9117 VmaSuballocation& suballoc = suballocations1st[index1st];
9118 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9120 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9121 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
9122 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9124 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9125 suballoc.hAllocation = VK_NULL_HANDLE;
9126 m_SumFreeSize += suballoc.size;
9127 ++m_1stNullItemsMiddleCount;
9144 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9146 uint32_t lostAllocationCount = 0;
9148 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9149 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9151 VmaSuballocation& suballoc = suballocations1st[i];
9152 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9153 suballoc.hAllocation->CanBecomeLost() &&
9154 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9156 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9157 suballoc.hAllocation = VK_NULL_HANDLE;
9158 ++m_1stNullItemsMiddleCount;
9159 m_SumFreeSize += suballoc.size;
9160 ++lostAllocationCount;
9164 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9165 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9167 VmaSuballocation& suballoc = suballocations2nd[i];
9168 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9169 suballoc.hAllocation->CanBecomeLost() &&
9170 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9172 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9173 suballoc.hAllocation = VK_NULL_HANDLE;
9174 ++m_2ndNullItemsCount;
9175 ++lostAllocationCount;
9179 if(lostAllocationCount)
9184 return lostAllocationCount;
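// Illustrative sketch, not the library's code: MakeAllocationsLost() above relies on the same
// frame-age test used in CreateAllocationRequest() - an allocation that can become lost is
// evicted once it has not been used for more than frameInUseCount frames.
static inline bool VmaExample_AllocationCanBeMadeLost(
    uint32_t lastUseFrameIndex,
    uint32_t frameInUseCount,
    uint32_t currentFrameIndex)
{
    // Mirrors the condition checked against GetLastUseFrameIndex() above.
    return lastUseFrameIndex + frameInUseCount < currentFrameIndex;
}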
9187 VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
9189 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9190 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9192 const VmaSuballocation& suballoc = suballocations1st[i];
9193 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9195 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9197 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9198 return VK_ERROR_VALIDATION_FAILED_EXT;
9200 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9202 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9203 return VK_ERROR_VALIDATION_FAILED_EXT;
9208 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9209 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9211 const VmaSuballocation& suballoc = suballocations2nd[i];
9212 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9214 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9216 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9217 return VK_ERROR_VALIDATION_FAILED_EXT;
9219 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9221 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9222 return VK_ERROR_VALIDATION_FAILED_EXT;
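// Illustrative sketch with assumed details, not the library's code: when VMA_DEBUG_MARGIN > 0
// and VMA_DEBUG_DETECT_CORRUPTION is enabled, a known 32-bit pattern is written into the
// margins around every allocation and re-checked by the CheckCorruption() path above. The
// pattern value below and writing only a single word (rather than filling the whole margin)
// are simplifications for illustration.
static const uint32_t VMA_EXAMPLE_MAGIC = 0x7F84E666u; // placeholder pattern

static inline void VmaExample_WriteMagic(void* pBlockData, VkDeviceSize offset)
{
    *reinterpret_cast<uint32_t*>(static_cast<char*>(pBlockData) + offset) = VMA_EXAMPLE_MAGIC;
}
static inline bool VmaExample_ValidateMagic(const void* pBlockData, VkDeviceSize offset)
{
    return *reinterpret_cast<const uint32_t*>(
        static_cast<const char*>(pBlockData) + offset) == VMA_EXAMPLE_MAGIC;
}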
9230 void VmaBlockMetadata_Linear::Alloc(
9231 const VmaAllocationRequest& request,
9232 VmaSuballocationType type,
9233 VkDeviceSize allocSize,
9237 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
9241 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
9242 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9243 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9244 suballocations2nd.push_back(newSuballoc);
9245 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
9249 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9252 if(suballocations1st.empty())
9254 suballocations1st.push_back(newSuballoc);
9259 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9262 VMA_ASSERT(request.offset + allocSize <= GetSize());
9263 suballocations1st.push_back(newSuballoc);
9266 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9268 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9270 switch(m_2ndVectorMode)
9272 case SECOND_VECTOR_EMPTY:
9274 VMA_ASSERT(suballocations2nd.empty());
9275 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9277 case SECOND_VECTOR_RING_BUFFER:
9279 VMA_ASSERT(!suballocations2nd.empty());
9281 case SECOND_VECTOR_DOUBLE_STACK:
9282 VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9288 suballocations2nd.push_back(newSuballoc);
9292 VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
9297 m_SumFreeSize -= newSuballoc.size;
9300 void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
9302 FreeAtOffset(allocation->GetOffset());
9305 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9307 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9308 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9310 if(!suballocations1st.empty())
9313 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9314 if(firstSuballoc.offset == offset)
9316 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9317 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9318 m_SumFreeSize += firstSuballoc.size;
9319 ++m_1stNullItemsBeginCount;
9326 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9327 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9329 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9330 if(lastSuballoc.offset == offset)
9332 m_SumFreeSize += lastSuballoc.size;
9333 suballocations2nd.pop_back();
9339 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9341 VmaSuballocation& lastSuballoc = suballocations1st.back();
9342 if(lastSuballoc.offset == offset)
9344 m_SumFreeSize += lastSuballoc.size;
9345 suballocations1st.pop_back();
9353 VmaSuballocation refSuballoc;
9354 refSuballoc.offset = offset;
9356 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
9357 suballocations1st.begin() + m_1stNullItemsBeginCount,
9358 suballocations1st.end(),
9360 if(it != suballocations1st.end())
9362 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9363 it->hAllocation = VK_NULL_HANDLE;
9364 ++m_1stNullItemsMiddleCount;
9365 m_SumFreeSize += it->size;
9371 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
9374 VmaSuballocation refSuballoc;
9375 refSuballoc.offset = offset;
9377 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9378 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9379 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9380 if(it != suballocations2nd.end())
9382 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9383 it->hAllocation = VK_NULL_HANDLE;
9384 ++m_2ndNullItemsCount;
9385 m_SumFreeSize += it->size;
9391 VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
9394 bool VmaBlockMetadata_Linear::ShouldCompact1st() const
9396 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9397 const size_t suballocCount = AccessSuballocations1st().size();
9398 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
9401 void VmaBlockMetadata_Linear::CleanupAfterFree()
9403 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9404 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9408 suballocations1st.clear();
9409 suballocations2nd.clear();
9410 m_1stNullItemsBeginCount = 0;
9411 m_1stNullItemsMiddleCount = 0;
9412 m_2ndNullItemsCount = 0;
9413 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9417 const size_t suballoc1stCount = suballocations1st.size();
9418 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9419 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
9422 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9423 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9425 ++m_1stNullItemsBeginCount;
9426 --m_1stNullItemsMiddleCount;
9430 while(m_1stNullItemsMiddleCount > 0 &&
9431 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9433 --m_1stNullItemsMiddleCount;
9434 suballocations1st.pop_back();
9438 while(m_2ndNullItemsCount > 0 &&
9439 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9441 --m_2ndNullItemsCount;
9442 suballocations2nd.pop_back();
9445 if(ShouldCompact1st())
9447 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9448 size_t srcIndex = m_1stNullItemsBeginCount;
9449 for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
9451 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9455 if(dstIndex != srcIndex)
9457 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9461 suballocations1st.resize(nonNullItemCount);
9462 m_1stNullItemsBeginCount = 0;
9463 m_1stNullItemsMiddleCount = 0;
9467 if(suballocations2nd.empty())
9469 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9473 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9475 suballocations1st.clear();
9476 m_1stNullItemsBeginCount = 0;
9478 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9481 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9482 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9483 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9484 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9486 ++m_1stNullItemsBeginCount;
9487 --m_1stNullItemsMiddleCount;
9489 m_2ndNullItemsCount = 0;
9490 m_1stVectorIndex ^= 1;
9495 VMA_HEAVY_ASSERT(Validate());
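// Illustrative stand-alone sketch of the compaction that CleanupAfterFree() performs on the
// 1st suballocation vector when ShouldCompact1st() triggers: live items are slid to the front,
// preserving order, and the freed (null) slots are dropped. Plain integers stand in for
// VmaSuballocation here; -1 marks a freed slot.
static size_t VmaExample_CompactLiveItems(long long* items, size_t count)
{
    size_t dst = 0;
    for(size_t src = 0; src < count; ++src)
    {
        if(items[src] != -1)
        {
            items[dst++] = items[src]; // keep live items in their original order
        }
    }
    return dst; // new item count; the caller shrinks its container to this size
}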
9502 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
9503 VmaBlockMetadata(hAllocator),
9505 m_AllocationCount(0),
9509 memset(m_FreeList, 0, sizeof(m_FreeList));
9512 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
9517 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9519 VmaBlockMetadata::Init(size);
9521 m_UsableSize = VmaPrevPow2(size);
9522 m_SumFreeSize = m_UsableSize;
9526 while(m_LevelCount < MAX_LEVELS &&
9527 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
9532 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
9533 rootNode->offset = 0;
9534 rootNode->type = Node::TYPE_FREE;
9535 rootNode->parent = VMA_NULL;
9536 rootNode->buddy = VMA_NULL;
9539 AddToFreeListFront(0, rootNode);
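// Illustrative stand-alone sketch of the initialization math above: only the largest
// power-of-two prefix of the block is managed (VmaPrevPow2), and the level count grows until
// either MAX_LEVELS is reached or the node size would drop below MIN_NODE_SIZE. The helpers
// below are reference equivalents; the starting level count of 1 and the concrete
// MIN_NODE_SIZE / MAX_LEVELS values are the library's own constants.
static inline VkDeviceSize VmaExample_PrevPow2(VkDeviceSize v) // expects v >= 1
{
    VkDeviceSize result = 1;
    while(result * 2 <= v)
    {
        result *= 2;
    }
    return result;
}
static inline uint32_t VmaExample_LevelCount(
    VkDeviceSize usableSize, VkDeviceSize minNodeSize, uint32_t maxLevels)
{
    uint32_t levelCount = 1;
    while(levelCount < maxLevels && (usableSize >> levelCount) >= minNodeSize)
    {
        ++levelCount; // node size at level L is usableSize >> L
    }
    return levelCount;
}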
9542 bool VmaBlockMetadata_Buddy::Validate() const
9545 ValidationContext ctx;
9546 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9548 VMA_VALIDATE(false && "ValidateNode failed.");
9550 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9551 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
9554 for(uint32_t level = 0; level < m_LevelCount; ++level)
9556 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9557 m_FreeList[level].front->free.prev == VMA_NULL);
9559 for(Node* node = m_FreeList[level].front;
9561 node = node->free.next)
9563 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9565 if(node->free.next == VMA_NULL)
9567 VMA_VALIDATE(m_FreeList[level].back == node);
9571 VMA_VALIDATE(node->free.next->free.prev == node);
9577 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9579 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
9585 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
9587 for(uint32_t level = 0; level < m_LevelCount; ++level)
9589 if(m_FreeList[level].front != VMA_NULL)
9591 return LevelToNodeSize(level);
9597 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
9599 const VkDeviceSize unusableSize = GetUnusableSize();
9610 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
9612 if(unusableSize > 0)
9621 void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
9623 const VkDeviceSize unusableSize = GetUnusableSize();
9625 inoutStats.size += GetSize();
9626 inoutStats.unusedSize += m_SumFreeSize + unusableSize;
9631 if(unusableSize > 0)
9638 #if VMA_STATS_STRING_ENABLED
9640 void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
9644 CalcAllocationStatInfo(stat);
9646 PrintDetailedMap_Begin(
9652 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9654 const VkDeviceSize unusableSize = GetUnusableSize();
9655 if(unusableSize > 0)
9657 PrintDetailedMap_UnusedRange(json,
9662 PrintDetailedMap_End(json);
9665 #endif // #if VMA_STATS_STRING_ENABLED
9667 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
9668 uint32_t currentFrameIndex,
9669 uint32_t frameInUseCount,
9670 VkDeviceSize bufferImageGranularity,
9671 VkDeviceSize allocSize,
9672 VkDeviceSize allocAlignment,
9674 VmaSuballocationType allocType,
9675 bool canMakeOtherLost,
9677 VmaAllocationRequest* pAllocationRequest)
9679 VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
9683 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9684 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9685 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9687 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
9688 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
9691 if(allocSize > m_UsableSize)
9696 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9697 for(uint32_t level = targetLevel + 1; level--; )
9699 for(Node* freeNode = m_FreeList[level].front;
9700 freeNode != VMA_NULL;
9701 freeNode = freeNode->free.next)
9703 if(freeNode->offset % allocAlignment == 0)
9705 pAllocationRequest->offset = freeNode->offset;
9706 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9707 pAllocationRequest->sumItemSize = 0;
9708 pAllocationRequest->itemsToMakeLostCount = 0;
9709 pAllocationRequest->customData = (void*)(uintptr_t)level;
9718 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9719 uint32_t currentFrameIndex,
9720 uint32_t frameInUseCount,
9721 VmaAllocationRequest* pAllocationRequest)
9727 return pAllocationRequest->itemsToMakeLostCount == 0;
9730 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9739 void VmaBlockMetadata_Buddy::Alloc(
9740 const VmaAllocationRequest& request,
9741 VmaSuballocationType type,
9742 VkDeviceSize allocSize,
9746 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9747 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
9749 Node* currNode = m_FreeList[currLevel].front;
9750 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9751 while(currNode->offset != request.offset)
9753 currNode = currNode->free.next;
9754 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9758 while(currLevel < targetLevel)
9762 RemoveFromFreeList(currLevel, currNode);
9764 const uint32_t childrenLevel = currLevel + 1;
9767 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
9768 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
9770 leftChild->offset = currNode->offset;
9771 leftChild->type = Node::TYPE_FREE;
9772 leftChild->parent = currNode;
9773 leftChild->buddy = rightChild;
9775 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9776 rightChild->type = Node::TYPE_FREE;
9777 rightChild->parent = currNode;
9778 rightChild->buddy = leftChild;
9781 currNode->type = Node::TYPE_SPLIT;
9782 currNode->split.leftChild = leftChild;
9785 AddToFreeListFront(childrenLevel, rightChild);
9786 AddToFreeListFront(childrenLevel, leftChild);
9791 currNode = m_FreeList[currLevel].front;
9800 VMA_ASSERT(currLevel == targetLevel &&
9801 currNode != VMA_NULL &&
9802 currNode->type == Node::TYPE_FREE);
9803 RemoveFromFreeList(currLevel, currNode);
9806 currNode->type = Node::TYPE_ALLOCATION;
9807 currNode->allocation.alloc = hAllocation;
9809 ++m_AllocationCount;
9811 m_SumFreeSize -= allocSize;
9814 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9816 if(node->type == Node::TYPE_SPLIT)
9818 DeleteNode(node->split.leftChild->buddy);
9819 DeleteNode(node->split.leftChild);
9822 vma_delete(GetAllocationCallbacks(), node);
9825 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
9827 VMA_VALIDATE(level < m_LevelCount);
9828 VMA_VALIDATE(curr->parent == parent);
9829 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9830 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9833 case Node::TYPE_FREE:
9835 ctx.calculatedSumFreeSize += levelNodeSize;
9836 ++ctx.calculatedFreeCount;
9838 case Node::TYPE_ALLOCATION:
9839 ++ctx.calculatedAllocationCount;
9840 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
9841 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
9843 case Node::TYPE_SPLIT:
9845 const uint32_t childrenLevel = level + 1;
9846 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
9847 const Node* const leftChild = curr->split.leftChild;
9848 VMA_VALIDATE(leftChild != VMA_NULL);
9849 VMA_VALIDATE(leftChild->offset == curr->offset);
9850 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9852 VMA_VALIDATE(false && "ValidateNode for left child failed.");
9854 const Node* const rightChild = leftChild->buddy;
9855 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9856 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9858 VMA_VALIDATE(false && "ValidateNode for right child failed.");
9869 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
9873 VkDeviceSize currLevelNodeSize = m_UsableSize;
9874 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
9875 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9878 currLevelNodeSize = nextLevelNodeSize;
9879 nextLevelNodeSize = currLevelNodeSize >> 1;
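// Illustrative stand-alone equivalent of the loop above: starting from the whole usable size
// at level 0, descend as long as the next (half-sized) level still fits allocSize. The initial
// level value of 0 is assumed here.
static inline uint32_t VmaExample_AllocSizeToLevel(
    VkDeviceSize allocSize, VkDeviceSize usableSize, uint32_t levelCount)
{
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = usableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < levelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}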
9884 void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
9887 Node* node = m_Root;
9888 VkDeviceSize nodeOffset = 0;
9890 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
9891 while(node->type == Node::TYPE_SPLIT)
9893 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
9894 if(offset < nodeOffset + nextLevelSize)
9896 node = node->split.leftChild;
9900 node = node->split.leftChild->buddy;
9901 nodeOffset += nextLevelSize;
9904 levelNodeSize = nextLevelSize;
9907 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
9908 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9911 --m_AllocationCount;
9912 m_SumFreeSize += alloc->GetSize();
9914 node->type = Node::TYPE_FREE;
9917 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9919 RemoveFromFreeList(level, node->buddy);
9920 Node* const parent = node->parent;
9922 vma_delete(GetAllocationCallbacks(), node->buddy);
9923 vma_delete(GetAllocationCallbacks(), node);
9924 parent->type = Node::TYPE_FREE;
9932 AddToFreeListFront(level, node);
9935 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
9939 case Node::TYPE_FREE:
9945 case Node::TYPE_ALLOCATION:
9947 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
9953 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
9954 if(unusedRangeSize > 0)
9963 case Node::TYPE_SPLIT:
9965 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9966 const Node* const leftChild = node->split.leftChild;
9967 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
9968 const Node* const rightChild = leftChild->buddy;
9969 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
9977 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9979 VMA_ASSERT(node->type == Node::TYPE_FREE);
9982 Node* const frontNode = m_FreeList[level].front;
9983 if(frontNode == VMA_NULL)
9985 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9986 node->free.prev = node->free.next = VMA_NULL;
9987 m_FreeList[level].front = m_FreeList[level].back = node;
9991 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9992 node->free.prev = VMA_NULL;
9993 node->free.next = frontNode;
9994 frontNode->free.prev = node;
9995 m_FreeList[level].front = node;
9999 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
10001 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
10004 if(node->free.prev == VMA_NULL)
10006 VMA_ASSERT(m_FreeList[level].front == node);
10007 m_FreeList[level].front = node->free.next;
10011 Node* const prevFreeNode = node->free.prev;
10012 VMA_ASSERT(prevFreeNode->free.next == node);
10013 prevFreeNode->free.next = node->free.next;
10017 if(node->free.next == VMA_NULL)
10019 VMA_ASSERT(m_FreeList[level].back == node);
10020 m_FreeList[level].back = node->free.prev;
10024 Node* const nextFreeNode = node->free.next;
10025 VMA_ASSERT(nextFreeNode->free.prev == node);
10026 nextFreeNode->free.prev = node->free.prev;
10030 #if VMA_STATS_STRING_ENABLED
10031 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
10035 case Node::TYPE_FREE:
10036 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
10038 case Node::TYPE_ALLOCATION:
10040 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
10041 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10042 if(allocSize < levelNodeSize)
10044 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
10048 case Node::TYPE_SPLIT:
10050 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10051 const Node* const leftChild = node->split.leftChild;
10052 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
10053 const Node* const rightChild = leftChild->buddy;
10054 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
10061 #endif // #if VMA_STATS_STRING_ENABLED
10067 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
10068 m_pMetadata(VMA_NULL),
10069 m_MemoryTypeIndex(UINT32_MAX),
10071 m_hMemory(VK_NULL_HANDLE),
10073 m_pMappedData(VMA_NULL)
10077 void VmaDeviceMemoryBlock::Init(
10079 uint32_t newMemoryTypeIndex,
10080 VkDeviceMemory newMemory,
10081 VkDeviceSize newSize,
10083 uint32_t algorithm)
10085 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
10087 m_MemoryTypeIndex = newMemoryTypeIndex;
10089 m_hMemory = newMemory;
10094 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
10097 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
10103 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
10105 m_pMetadata->Init(newSize);
10108 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
10112 VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
10114 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
10115 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
10116 m_hMemory = VK_NULL_HANDLE;
10118 vma_delete(allocator, m_pMetadata);
10119 m_pMetadata = VMA_NULL;
10122 bool VmaDeviceMemoryBlock::Validate() const
10124 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
10125 (m_pMetadata->GetSize() != 0));
10127 return m_pMetadata->Validate();
10130 VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
10132 void* pData = nullptr;
10133 VkResult res = Map(hAllocator, 1, &pData);
10134 if(res != VK_SUCCESS)
10139 res = m_pMetadata->CheckCorruption(pData);
10141 Unmap(hAllocator, 1);
10146 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
10153 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10154 if(m_MapCount != 0)
10156 m_MapCount += count;
10157 VMA_ASSERT(m_pMappedData != VMA_NULL);
10158 if(ppData != VMA_NULL)
10160 *ppData = m_pMappedData;
10166 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
10167 hAllocator->m_hDevice,
10173 if(result == VK_SUCCESS)
10175 if(ppData != VMA_NULL)
10177 *ppData = m_pMappedData;
10179 m_MapCount = count;
10185 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
10192 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10193 if(m_MapCount >= count)
10195 m_MapCount -= count;
10196 if(m_MapCount == 0)
10198 m_pMappedData = VMA_NULL;
10199 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
10204 VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
10208 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10210 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10211 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10214 VkResult res = Map(hAllocator, 1, &pData);
10215 if(res != VK_SUCCESS)
10220 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10221 VmaWriteMagicValue(pData, allocOffset + allocSize);
10223 Unmap(hAllocator, 1);
10228 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10230 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10231 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10234 VkResult res = Map(hAllocator, 1, &pData);
10235 if(res != VK_SUCCESS)
10240 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
10242 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10244 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
10246 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
10249 Unmap(hAllocator, 1);
10254 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
10259 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10260 hAllocation->GetBlock() == this);
10262 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10263 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
10264 hAllocator->m_hDevice,
10267 hAllocation->GetOffset());
10270 VkResult VmaDeviceMemoryBlock::BindImageMemory(
10275 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10276 hAllocation->GetBlock() == this);
10278 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10279 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
10280 hAllocator->m_hDevice,
10283 hAllocation->GetOffset());
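// Illustrative usage sketch, not part of the library: the reference-counted Map()/Unmap()
// implemented above backs the public vmaMapMemory()/vmaUnmapMemory() functions. The helper
// name is hypothetical; it assumes a valid allocator and an allocation in HOST_VISIBLE memory
// (for non-HOST_COHERENT memory the caller would additionally need to flush the written range).
static VkResult VmaExample_UploadBytes(
    VmaAllocator allocator, VmaAllocation allocation, const void* srcData, size_t srcSize)
{
    void* pMapped = VMA_NULL;
    VkResult res = vmaMapMemory(allocator, allocation, &pMapped); // bumps the block's map count
    if(res != VK_SUCCESS)
    {
        return res;
    }
    memcpy(pMapped, srcData, srcSize);
    vmaUnmapMemory(allocator, allocation); // unmaps the block when its map count reaches zero
    return VK_SUCCESS;
}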
10288 memset(&outInfo, 0, sizeof(outInfo));
10307 static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
10315 VmaPool_T::VmaPool_T(
10318 VkDeviceSize preferredBlockSize) :
10321 createInfo.memoryTypeIndex,
10322 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10323 createInfo.minBlockCount,
10324 createInfo.maxBlockCount,
10326 createInfo.frameInUseCount,
10328 createInfo.blockSize != 0,
10334 VmaPool_T::~VmaPool_T()
10338 #if VMA_STATS_STRING_ENABLED
10340 #endif // #if VMA_STATS_STRING_ENABLED
10342 VmaBlockVector::VmaBlockVector(
10344 uint32_t memoryTypeIndex,
10345 VkDeviceSize preferredBlockSize,
10346 size_t minBlockCount,
10347 size_t maxBlockCount,
10348 VkDeviceSize bufferImageGranularity,
10349 uint32_t frameInUseCount,
10351 bool explicitBlockSize,
10352 uint32_t algorithm) :
10353 m_hAllocator(hAllocator),
10354 m_MemoryTypeIndex(memoryTypeIndex),
10355 m_PreferredBlockSize(preferredBlockSize),
10356 m_MinBlockCount(minBlockCount),
10357 m_MaxBlockCount(maxBlockCount),
10358 m_BufferImageGranularity(bufferImageGranularity),
10359 m_FrameInUseCount(frameInUseCount),
10360 m_IsCustomPool(isCustomPool),
10361 m_ExplicitBlockSize(explicitBlockSize),
10362 m_Algorithm(algorithm),
10363 m_HasEmptyBlock(false),
10364 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10365 m_pDefragmentator(VMA_NULL),
10370 VmaBlockVector::~VmaBlockVector()
10372 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
10374 for(size_t i = m_Blocks.size(); i--; )
10376 m_Blocks[i]->Destroy(m_hAllocator);
10377 vma_delete(m_hAllocator, m_Blocks[i]);
10381 VkResult VmaBlockVector::CreateMinBlocks()
10383 for(size_t i = 0; i < m_MinBlockCount; ++i)
10385 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10386 if(res != VK_SUCCESS)
10394 void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
10396 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10398 const size_t blockCount = m_Blocks.size();
10407 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10409 const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
10410 VMA_ASSERT(pBlock);
10411 VMA_HEAVY_ASSERT(pBlock->Validate());
10412 pBlock->m_pMetadata->AddPoolStats(*pStats);
10416 bool VmaBlockVector::IsCorruptionDetectionEnabled() const
10418 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10419 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
10420 (VMA_DEBUG_MARGIN > 0) &&
10421 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
10424 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
10426 VkResult VmaBlockVector::Allocate(
10428 uint32_t currentFrameIndex,
10430 VkDeviceSize alignment,
10432 VmaSuballocationType suballocType,
10439 const bool canCreateNewBlock =
10441 (m_Blocks.size() < m_MaxBlockCount);
10448 canMakeOtherLost = false;
10452 if(isUpperAddress &&
10455 return VK_ERROR_FEATURE_NOT_PRESENT;
10469 return VK_ERROR_FEATURE_NOT_PRESENT;
10473 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10475 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10478 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10485 if(!canMakeOtherLost || canCreateNewBlock)
10494 if(!m_Blocks.empty())
10496 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
10497 VMA_ASSERT(pCurrBlock);
10498 VkResult res = AllocateFromBlock(
10509 if(res == VK_SUCCESS)
10511 VMA_DEBUG_LOG(" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
10521 for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10523 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
10524 VMA_ASSERT(pCurrBlock);
10525 VkResult res = AllocateFromBlock(
10536 if(res == VK_SUCCESS)
10538 VMA_DEBUG_LOG(" Returned from existing block #%u", (uint32_t)blockIndex);
10546 for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
10548 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
10549 VMA_ASSERT(pCurrBlock);
10550 VkResult res = AllocateFromBlock(
10561 if(res == VK_SUCCESS)
10563 VMA_DEBUG_LOG(" Returned from existing block #%u", (uint32_t)blockIndex);
10571 if(canCreateNewBlock)
10574 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10575 uint32_t newBlockSizeShift = 0;
10576 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
10578 if(!m_ExplicitBlockSize)
10581 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10582 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10584 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10585 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10587 newBlockSize = smallerNewBlockSize;
10588 ++newBlockSizeShift;
10597 size_t newBlockIndex = 0;
10598 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
10600 if(!m_ExplicitBlockSize)
10602 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10604 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10605 if(smallerNewBlockSize >= size)
10607 newBlockSize = smallerNewBlockSize;
10608 ++newBlockSizeShift;
10609 res = CreateBlock(newBlockSize, &newBlockIndex);
10618 if(res == VK_SUCCESS)
10620 VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
10621 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10623 res = AllocateFromBlock(
10634 if(res == VK_SUCCESS)
10636 VMA_DEBUG_LOG(" Created new block Size=%llu", newBlockSize);
10642 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10649 if(canMakeOtherLost)
10651 uint32_t tryIndex = 0;
10652 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10654 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10655 VmaAllocationRequest bestRequest = {};
10656 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
10662 for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10664 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
10665 VMA_ASSERT(pCurrBlock);
10666 VmaAllocationRequest currRequest = {};
10667 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10670 m_BufferImageGranularity,
10679 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10680 if(pBestRequestBlock == VMA_NULL ||
10681 currRequestCost < bestRequestCost)
10683 pBestRequestBlock = pCurrBlock;
10684 bestRequest = currRequest;
10685 bestRequestCost = currRequestCost;
10687 if(bestRequestCost == 0)
10698 for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
10700 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
10701 VMA_ASSERT(pCurrBlock);
10702 VmaAllocationRequest currRequest = {};
10703 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10706 m_BufferImageGranularity,
10715 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10716 if(pBestRequestBlock == VMA_NULL ||
10717 currRequestCost < bestRequestCost ||
10720 pBestRequestBlock = pCurrBlock;
10721 bestRequest = currRequest;
10722 bestRequestCost = currRequestCost;
10724 if(bestRequestCost == 0 ||
10734 if(pBestRequestBlock != VMA_NULL)
10738 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10739 if(res != VK_SUCCESS)
10745 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10751 if(pBestRequestBlock->m_pMetadata->IsEmpty())
10753 m_HasEmptyBlock = false;
10756 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10757 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10758 (*pAllocation)->InitBlockAllocation(
10761 bestRequest.offset,
10767 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
10768 VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
10769 (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
10770 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10772 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10774 if(IsCorruptionDetectionEnabled())
10776 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
10777 VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
10792 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10794 return VK_ERROR_TOO_MANY_OBJECTS;
10798 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
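// Illustrative stand-alone sketch of the new-block-size heuristic used in Allocate() above:
// when the pool does not force an explicit block size, the preferred size may be halved up to
// NEW_BLOCK_SIZE_SHIFT_MAX times - first to start with smaller blocks while existing blocks
// are small, and again as a fallback when vkAllocateMemory fails.
static VkDeviceSize VmaExample_ChooseInitialBlockSize(
    VkDeviceSize preferredBlockSize,
    VkDeviceSize maxExistingBlockSize,
    VkDeviceSize allocSize,
    uint32_t maxShift) // e.g. 3, as above
{
    VkDeviceSize newBlockSize = preferredBlockSize;
    for(uint32_t i = 0; i < maxShift; ++i)
    {
        const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
        if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= allocSize * 2)
        {
            newBlockSize = smallerNewBlockSize;
        }
        else
        {
            break;
        }
    }
    return newBlockSize;
}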
10801 void VmaBlockVector::Free(
10804 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
10808 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10810 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
10812 if(IsCorruptionDetectionEnabled())
10814 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
10815 VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
10818 if(hAllocation->IsPersistentMap())
10820 pBlock->Unmap(m_hAllocator, 1);
10823 pBlock->m_pMetadata->Free(hAllocation);
10824 VMA_HEAVY_ASSERT(pBlock->Validate());
10826 VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", memTypeIndex);
10829 if(pBlock->m_pMetadata->IsEmpty())
10832 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10834 pBlockToDelete = pBlock;
10840 m_HasEmptyBlock = true;
10845 else if(m_HasEmptyBlock)
10847 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10848 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10850 pBlockToDelete = pLastBlock;
10851 m_Blocks.pop_back();
10852 m_HasEmptyBlock = false;
10856 IncrementallySortBlocks();
10861 if(pBlockToDelete != VMA_NULL)
10863 VMA_DEBUG_LOG(" Deleted empty allocation");
10864 pBlockToDelete->Destroy(m_hAllocator);
10865 vma_delete(m_hAllocator, pBlockToDelete);
10869 VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
10871 VkDeviceSize result = 0;
10872 for(size_t i = m_Blocks.size(); i--; )
10874 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
10875 if(result >= m_PreferredBlockSize)
10883 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10885 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10887 if(m_Blocks[blockIndex] == pBlock)
10889 VmaVectorRemove(m_Blocks, blockIndex);
10896 void VmaBlockVector::IncrementallySortBlocks()
10901 for(size_t i = 1; i < m_Blocks.size(); ++i)
10903 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10905 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
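// Illustrative stand-alone sketch of IncrementallySortBlocks() above: one pass of adjacent
// swaps that gradually keeps the block array ordered by ascending free space. A plain array
// of free sizes stands in for the block vector here.
static void VmaExample_IncrementalSortByFreeSize(VkDeviceSize* freeSizes, size_t count)
{
    for(size_t i = 1; i < count; ++i)
    {
        if(freeSizes[i - 1] > freeSizes[i])
        {
            const VkDeviceSize tmp = freeSizes[i - 1];
            freeSizes[i - 1] = freeSizes[i];
            freeSizes[i] = tmp;
        }
    }
}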
10912 VkResult VmaBlockVector::AllocateFromBlock(
10913 VmaDeviceMemoryBlock* pBlock,
10915 uint32_t currentFrameIndex,
10917 VkDeviceSize alignment,
10920 VmaSuballocationType suballocType,
10929 VmaAllocationRequest currRequest = {};
10930 if(pBlock->m_pMetadata->CreateAllocationRequest(
10933 m_BufferImageGranularity,
10943 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
10947 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10948 if(res != VK_SUCCESS)
10955 if(pBlock->m_pMetadata->IsEmpty())
10957 m_HasEmptyBlock = false;
10960 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10961 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10962 (*pAllocation)->InitBlockAllocation(
10965 currRequest.offset,
10971 VMA_HEAVY_ASSERT(pBlock->Validate());
10972 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
10973 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10975 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10977 if(IsCorruptionDetectionEnabled())
10979 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
10980 VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
10984 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10987 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
10989 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10990 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10991 allocInfo.allocationSize = blockSize;
10992 VkDeviceMemory mem = VK_NULL_HANDLE;
10993 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
11002 VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
11007 allocInfo.allocationSize,
11011 m_Blocks.push_back(pBlock);
11012 if(pNewBlockIndex != VMA_NULL)
11014 *pNewBlockIndex = m_Blocks.size() - 1;
11020 #if VMA_STATS_STRING_ENABLED
11022 void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
11024 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11026 json.BeginObject();
11030 json.WriteString("MemoryTypeIndex");
11031 json.WriteNumber(m_MemoryTypeIndex);
11033 json.WriteString("BlockSize");
11034 json.WriteNumber(m_PreferredBlockSize);
11036 json.WriteString("BlockCount");
11037 json.BeginObject(true);
11038 if(m_MinBlockCount > 0)
11040 json.WriteString("Min");
11041 json.WriteNumber((uint64_t)m_MinBlockCount);
11043 if(m_MaxBlockCount < SIZE_MAX)
11045 json.WriteString("Max");
11046 json.WriteNumber((uint64_t)m_MaxBlockCount);
11048 json.WriteString("Cur");
11049 json.WriteNumber((uint64_t)m_Blocks.size());
11052 if(m_FrameInUseCount > 0)
11054 json.WriteString("FrameInUseCount");
11055 json.WriteNumber(m_FrameInUseCount);
11058 if(m_Algorithm != 0)
11060 json.WriteString("Algorithm");
11061 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
11066 json.WriteString("PreferredBlockSize");
11067 json.WriteNumber(m_PreferredBlockSize);
11070 json.WriteString("Blocks");
11071 json.BeginObject();
11072 for(size_t i = 0; i < m_Blocks.size(); ++i)
11074 json.BeginString();
11075 json.ContinueString(m_Blocks[i]->GetId());
11078 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
11085 #endif // #if VMA_STATS_STRING_ENABLED
11087 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
11089 uint32_t currentFrameIndex)
11091 if(m_pDefragmentator == VMA_NULL)
11093 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
11096 currentFrameIndex);
11099 return m_pDefragmentator;
11102 VkResult VmaBlockVector::Defragment(
11104 VkDeviceSize& maxBytesToMove,
11105 uint32_t& maxAllocationsToMove)
11107 if(m_pDefragmentator == VMA_NULL)
11112 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11115 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
11118 if(pDefragmentationStats != VMA_NULL)
11120 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
11121 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
11122 pDefragmentationStats->bytesMoved += bytesMoved;
11124 VMA_ASSERT(bytesMoved <= maxBytesToMove);
11125 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
11126 maxBytesToMove -= bytesMoved;
11127 maxAllocationsToMove -= allocationsMoved;
11131 m_HasEmptyBlock = false;
11132 for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
11134 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11135 if(pBlock->m_pMetadata->IsEmpty())
11137 if(m_Blocks.size() > m_MinBlockCount)
11139 if(pDefragmentationStats != VMA_NULL)
11142 pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
11145 VmaVectorRemove(m_Blocks, blockIndex);
11146 pBlock->Destroy(m_hAllocator);
11147 vma_delete(m_hAllocator, pBlock);
11151 m_HasEmptyBlock = true;
11159 void VmaBlockVector::DestroyDefragmentator()
11161 if(m_pDefragmentator != VMA_NULL)
11163 vma_delete(m_hAllocator, m_pDefragmentator);
11164 m_pDefragmentator = VMA_NULL;
11168 void VmaBlockVector::MakePoolAllocationsLost(
11169 uint32_t currentFrameIndex,
11170 size_t* pLostAllocationCount)
11172 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11173 size_t lostAllocationCount = 0;
11174 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11176 VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
11177 VMA_ASSERT(pBlock);
11178 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
11180 if(pLostAllocationCount != VMA_NULL)
11182 *pLostAllocationCount = lostAllocationCount;
11186 VkResult VmaBlockVector::CheckCorruption()
11188 if(!IsCorruptionDetectionEnabled())
11190 return VK_ERROR_FEATURE_NOT_PRESENT;
11193 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11194 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11196 VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
11197 VMA_ASSERT(pBlock);
11198 VkResult res = pBlock->CheckCorruption(m_hAllocator);
11199 if(res != VK_SUCCESS)
11207 void VmaBlockVector::AddStats(VmaStats* pStats)
11209 const uint32_t memTypeIndex = m_MemoryTypeIndex;
11210 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
11212 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11214 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11216 const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
11217 VMA_ASSERT(pBlock);
11218 VMA_HEAVY_ASSERT(pBlock->Validate());
11220 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
11221 VmaAddStatInfo(pStats->total, allocationStatInfo);
11222 VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
11223 VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
11230 VmaDefragmentator::VmaDefragmentator(
11232 VmaBlockVector* pBlockVector,
11233 uint32_t currentFrameIndex) :
11234 m_hAllocator(hAllocator),
11235 m_pBlockVector(pBlockVector),
11236 m_CurrentFrameIndex(currentFrameIndex),
11238 m_AllocationsMoved(0),
11239 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
11240 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
11242 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
11245 VmaDefragmentator::~VmaDefragmentator()
11247 for(size_t i = m_Blocks.size(); i--; )
11249 vma_delete(m_hAllocator, m_Blocks[i]);
11253 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
11255 AllocationInfo allocInfo;
11256 allocInfo.m_hAllocation = hAlloc;
11257 allocInfo.m_pChanged = pChanged;
11258 m_Allocations.push_back(allocInfo);
11261 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
11264 if(m_pMappedDataForDefragmentation)
11266 *ppMappedData = m_pMappedDataForDefragmentation;
11271 if(m_pBlock->GetMappedData())
11273 *ppMappedData = m_pBlock->GetMappedData();
11278 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
11279 *ppMappedData = m_pMappedDataForDefragmentation;
11283 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
11285 if(m_pMappedDataForDefragmentation != VMA_NULL)
11287 m_pBlock->Unmap(hAllocator, 1);
VkResult VmaDefragmentator::DefragmentRound(
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }
    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // Find the next allocation to move: blocks from last to first, allocations from largest to smallest.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                if(srcBlockIndex == 0)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }
        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11339 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11340 VmaAllocationRequest dstAllocRequest;
11341 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11342 m_CurrentFrameIndex,
11343 m_pBlockVector->GetFrameInUseCount(),
11344 m_pBlockVector->GetBufferImageGranularity(),
                &dstAllocRequest) &&
                MoveMakesSense(dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11355 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
11358 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11359 (m_BytesMoved + size > maxBytesToMove))
11361 return VK_INCOMPLETE;
11364 void* pDstMappedData = VMA_NULL;
11365 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11366 if(res != VK_SUCCESS)
11371 void* pSrcMappedData = VMA_NULL;
11372 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11373 if(res != VK_SUCCESS)
11380 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11381 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11382 static_cast<size_t>(size));
11384 if(VMA_DEBUG_MARGIN > 0)
11386 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11387 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
11390 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11395 allocInfo.m_hAllocation);
11396 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11398 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11400 if(allocInfo.m_pChanged != VMA_NULL)
11402 *allocInfo.m_pChanged = VK_TRUE;
11405 ++m_AllocationsMoved;
11406 m_BytesMoved += size;
11408 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
11416 if(srcAllocIndex > 0)
11422 if(srcBlockIndex > 0)
11425 srcAllocIndex = SIZE_MAX;
VkResult VmaDefragmentator::Defragment(
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Allocations.empty())
    {
        return VK_SUCCESS;
    }

    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort blocks by m_pBlock pointer value so they can be found with binary search.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());

    // Move allocation infos from m_Allocations into the per-block m_Allocations vectors.
    for(size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
    {
        AllocationInfo& allocInfo = m_Allocations[blockIndex];
        // Now that we are inside VmaBlockVector::m_Mutex, do the final check whether the allocation was lost.
        if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
        {
            VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
            BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
            if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
            {
                (*it)->m_Allocations.push_back(allocInfo);
            }
        }
    }
    m_Allocations.clear();

    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];
        pBlockInfo->CalcHasNonMovableAllocations();
        pBlockInfo->SortAllocationsBySizeDescecnding();
    }

    // Sort m_Blocks by the main criterion, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // Execute the defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
    }

    // Unmap blocks that were mapped for defragmentation.
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        m_Blocks[blockIndex]->Unmap(m_hAllocator);
    }

    return result;
}
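// Design note: at most two rounds are run here because space freed in "destination" blocks
// by the first round only becomes usable for further moves on a second pass, while the fixed
// round count keeps one defragmentation call bounded in CPU time. When the caller passes no
// limits, both budgets default to "unlimited"; a caller-side sketch with explicit budgets
// (illustrative numbers only) could look like:
//
//     VmaDefragmentationInfo defragInfo = {};
//     defragInfo.maxBytesToMove = 64ull * 1024 * 1024; // move at most 64 MiB per call
//     defragInfo.maxAllocationsToMove = 128;           // and at most 128 allocations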
bool VmaDefragmentator::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex) { return true; }
    if(dstBlockIndex > srcBlockIndex) { return false; }
    if(dstOffset < srcOffset) { return true; }
    return false;
}
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open the output file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write the header: file type and format version.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,4");

    return VK_SUCCESS;
}

VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}
void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
11588 CallParams callParams;
11589 GetBasicParams(callParams);
11591 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11592 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
11603 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11605 CallParams callParams;
11606 GetBasicParams(callParams);
11608 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11609 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
11614 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11615 const VkMemoryRequirements& vkMemReq,
11619 CallParams callParams;
11620 GetBasicParams(callParams);
11622 VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
11624 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11626 vkMemReq.alignment,
11627 vkMemReq.memoryTypeBits,
11635 userDataStr.GetString());
11639 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11640 const VkMemoryRequirements& vkMemReq,
11641 bool requiresDedicatedAllocation,
11642 bool prefersDedicatedAllocation,
11646 CallParams callParams;
11647 GetBasicParams(callParams);
11649 VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
11651 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11653 vkMemReq.alignment,
11654 vkMemReq.memoryTypeBits,
11655 requiresDedicatedAllocation ? 1 : 0,
11656 prefersDedicatedAllocation ? 1 : 0,
11664 userDataStr.GetString());
11668 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11669 const VkMemoryRequirements& vkMemReq,
11670 bool requiresDedicatedAllocation,
11671 bool prefersDedicatedAllocation,
11675 CallParams callParams;
11676 GetBasicParams(callParams);
11678 VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
11680 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11682 vkMemReq.alignment,
11683 vkMemReq.memoryTypeBits,
11684 requiresDedicatedAllocation ? 1 : 0,
11685 prefersDedicatedAllocation ? 1 : 0,
11693 userDataStr.GetString());
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordResizeAllocation(
    uint32_t frameIndex,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, newSize);
    Flush();
}
11723 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11725 const void* pUserData)
11727 CallParams callParams;
11728 GetBasicParams(callParams);
11730 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11731 UserDataString userDataStr(
11734 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11736 userDataStr.GetString());
11740 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11743 CallParams callParams;
11744 GetBasicParams(callParams);
11746 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11747 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11752 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11755 CallParams callParams;
11756 GetBasicParams(callParams);
11758 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11759 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11764 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11767 CallParams callParams;
11768 GetBasicParams(callParams);
11770 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11771 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11776 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11777 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11779 CallParams callParams;
11780 GetBasicParams(callParams);
11782 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11783 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
11790 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11791 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11793 CallParams callParams;
11794 GetBasicParams(callParams);
11796 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11797 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
11804 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11805 const VkBufferCreateInfo& bufCreateInfo,
11809 CallParams callParams;
11810 GetBasicParams(callParams);
11812 VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
11814 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11815 bufCreateInfo.flags,
11816 bufCreateInfo.size,
11817 bufCreateInfo.usage,
11818 bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.pool,
11826 userDataStr.GetString());
11830 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11831 const VkImageCreateInfo& imageCreateInfo,
11835 CallParams callParams;
11836 GetBasicParams(callParams);
11838 VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
11840 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11841 imageCreateInfo.flags,
11842 imageCreateInfo.imageType,
11843 imageCreateInfo.format,
11844 imageCreateInfo.extent.width,
11845 imageCreateInfo.extent.height,
11846 imageCreateInfo.extent.depth,
11847 imageCreateInfo.mipLevels,
11848 imageCreateInfo.arrayLayers,
11849 imageCreateInfo.samples,
11850 imageCreateInfo.tiling,
11851 imageCreateInfo.usage,
11852 imageCreateInfo.sharingMode,
11853 imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.pool,
11861 userDataStr.GetString());
11865 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11868 CallParams callParams;
11869 GetBasicParams(callParams);
11871 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11872 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
11877 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11880 CallParams callParams;
11881 GetBasicParams(callParams);
11883 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11884 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
11889 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11892 CallParams callParams;
11893 GetBasicParams(callParams);
11895 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11896 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11901 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11904 CallParams callParams;
11905 GetBasicParams(callParams);
11907 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11908 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
11913 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11916 CallParams callParams;
11917 GetBasicParams(callParams);
11919 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11920 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
11927 if(pUserData != VMA_NULL)
11931 m_Str = (
const char*)pUserData;
11935 sprintf_s(m_PtrStr,
"%p", pUserData);
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    bool dedicatedAllocationExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
11991 void VmaRecorder::GetBasicParams(CallParams& outParams)
11993 outParams.threadId = GetCurrentThreadId();
11995 LARGE_INTEGER counter;
11996 QueryPerformanceCounter(&counter);
11997 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
12000 void VmaRecorder::Flush()
#endif // #if VMA_RECORDING_ENABLED

VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_hDevice(pCreateInfo->device),
12017 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
12018 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
12019 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
12020 m_PreferredLargeHeapBlockSize(0),
12021 m_PhysicalDevice(pCreateInfo->physicalDevice),
12022 m_CurrentFrameIndex(0),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // The margin must be a multiple of sizeof(uint32_t) because a uint32_t magic value is written into it.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

#if !(VMA_DEDICATED_ALLOCATION)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
12051 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
12053 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
12064 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
12065 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
12067 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
12068 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
12069 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
12070 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
    for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
    {
        const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
        if(limit != VK_WHOLE_SIZE)
        {
            m_HeapSizeLimit[heapIndex] = limit;
            if(limit < m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_MemProps.memoryHeaps[heapIndex].size = limit;
            }
        }
    }
12091 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12093 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
12098 preferredBlockSize,
12101 GetBufferImageGranularity(),
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    VkResult res = VK_SUCCESS;

#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_UseKhrDedicatedAllocation);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = &vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = &vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif // #if VMA_DEDICATED_ALLOCATION
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
12195 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
12196 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
12197 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
12198 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
12199 VMA_COPY_IF_NOT_NULL(vkMapMemory);
12200 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
12201 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
12202 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
12203 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
12204 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
12205 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
12206 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
12207 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
12208 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
12209 VMA_COPY_IF_NOT_NULL(vkCreateImage);
12210 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
#if VMA_DEDICATED_ALLOCATION
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL

    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
12222 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
12223 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
12224 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
12225 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
12226 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
12227 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
12228 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
12229 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
12230 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
12231 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
12232 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
12233 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
12234 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
12235 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
12236 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
}

VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
}
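// Worked example: with the default VMA_SMALL_HEAP_MAX_SIZE of 1 GiB and
// VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE of 256 MiB, a 256 MiB heap counts as "small" and gets
// 256 MiB / 8 = 32 MiB blocks, while an 8 GiB device-local heap gets 256 MiB blocks
// (or whatever preferredLargeHeapBlockSize the application passed at allocator creation).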
12254 VkResult VmaAllocator_T::AllocateMemoryOfType(
12256 VkDeviceSize alignment,
12257 bool dedicatedAllocation,
12258 VkBuffer dedicatedBuffer,
12259 VkImage dedicatedImage,
12261 uint32_t memTypeIndex,
12262 VmaSuballocationType suballocType,
12265 VMA_ASSERT(pAllocation != VMA_NULL);
    VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
12278 VMA_ASSERT(blockVector);
12280 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
12281 bool preferDedicatedMemory =
12282 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
12283 dedicatedAllocation ||
12285 size > preferredBlockSize / 2;
    if(preferDedicatedMemory &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
12298 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12302 return AllocateDedicatedMemory(
12316 VkResult res = blockVector->Allocate(
12318 m_CurrentFrameIndex.load(),
12324 if(res == VK_SUCCESS)
12332 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12336 res = AllocateDedicatedMemory(
12342 finalCreateInfo.pUserData,
12346 if(res == VK_SUCCESS)
12349 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
12355 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
12362 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12364 VmaSuballocationType suballocType,
12365 uint32_t memTypeIndex,
12367 bool isUserDataString,
12369 VkBuffer dedicatedBuffer,
12370 VkImage dedicatedImage,
12373 VMA_ASSERT(pAllocation);
12375 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12376 allocInfo.memoryTypeIndex = memTypeIndex;
12377 allocInfo.allocationSize = size;
12379 #if VMA_DEDICATED_ALLOCATION 12380 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12381 if(m_UseKhrDedicatedAllocation)
12383 if(dedicatedBuffer != VK_NULL_HANDLE)
12385 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12386 dedicatedAllocInfo.buffer = dedicatedBuffer;
12387 allocInfo.pNext = &dedicatedAllocInfo;
12389 else if(dedicatedImage != VK_NULL_HANDLE)
12391 dedicatedAllocInfo.image = dedicatedImage;
12392 allocInfo.pNext = &dedicatedAllocInfo;
12395 #endif // #if VMA_DEDICATED_ALLOCATION 12398 VkDeviceMemory hMemory = VK_NULL_HANDLE;
12399 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
12402 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
12406 void* pMappedData = VMA_NULL;
12409 res = (*m_VulkanFunctions.vkMapMemory)(
12418 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
12419 FreeVulkanMemory(memTypeIndex, size, hMemory);
12424 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
12425 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
12426 (*pAllocation)->SetUserData(
this, pUserData);
12427 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12429 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12434 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12435 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12436 VMA_ASSERT(pDedicatedAllocations);
12437 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
12440 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
12509 VkResult VmaAllocator_T::AllocateMemory(
12510 const VkMemoryRequirements& vkMemReq,
12511 bool requiresDedicatedAllocation,
12512 bool prefersDedicatedAllocation,
12513 VkBuffer dedicatedBuffer,
12514 VkImage dedicatedImage,
12516 VmaSuballocationType suballocType,
12519 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
12521 if(vkMemReq.size == 0)
12523 return VK_ERROR_VALIDATION_FAILED_EXT;
12528 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12529 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12534 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12535 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));
        return createInfo.pool->m_BlockVector.Allocate(
12564 m_CurrentFrameIndex.load(),
12574 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12575 uint32_t memTypeIndex = UINT32_MAX;
12577 if(res == VK_SUCCESS)
12579 VkDeviceSize alignmentForMemType = VMA_MAX(
12580 vkMemReq.alignment,
12581 GetMemoryTypeMinAlignment(memTypeIndex));
12583 res = AllocateMemoryOfType(
12585 alignmentForMemType,
12586 requiresDedicatedAllocation || prefersDedicatedAllocation,
12594 if(res == VK_SUCCESS)
12604 memoryTypeBits &= ~(1u << memTypeIndex);
12607 if(res == VK_SUCCESS)
12609 alignmentForMemType = VMA_MAX(
12610 vkMemReq.alignment,
12611 GetMemoryTypeMinAlignment(memTypeIndex));
12613 res = AllocateMemoryOfType(
12615 alignmentForMemType,
12616 requiresDedicatedAllocation || prefersDedicatedAllocation,
12624 if(res == VK_SUCCESS)
12634 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
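// Fallback strategy used above: if allocation in the best-matching memory type fails, that
// type's bit is cleared from memoryTypeBits (memoryTypeBits &= ~(1u << memTypeIndex)) and the
// next-best candidate is selected, until either an allocation succeeds or no candidate types
// remain and VK_ERROR_OUT_OF_DEVICE_MEMORY is returned to the caller.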
void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation);

    if(TouchAllocation(allocation))
    {
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
        }

        switch(allocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            {
                VmaBlockVector* pBlockVector = VMA_NULL;
                VmaPool hPool = allocation->GetPool();
                if(hPool != VK_NULL_HANDLE)
                {
                    pBlockVector = &hPool->m_BlockVector;
                }
                else
                {
                    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                    pBlockVector = m_pBlockVectors[memTypeIndex];
                }
                pBlockVector->Free(allocation);
            }
            break;
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            FreeDedicatedMemory(allocation);
            break;
        default:
            VMA_ASSERT(0);
        }
    }

    allocation->SetUserData(this, VMA_NULL);
    vma_delete(this, allocation);
}
12686 VkResult VmaAllocator_T::ResizeAllocation(
12688 VkDeviceSize newSize)
12690 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
12692 return VK_ERROR_VALIDATION_FAILED_EXT;
12694 if(newSize == alloc->GetSize())
12699 switch(alloc->GetType())
12701 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12702 return VK_ERROR_FEATURE_NOT_PRESENT;
12703 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12704 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
12706 alloc->ChangeSize(newSize);
12707 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
12712 return VK_ERROR_OUT_OF_POOL_MEMORY;
12716 return VK_ERROR_VALIDATION_FAILED_EXT;
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
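// For human inspection the same data can be dumped as JSON. A sketch, assuming `allocator`:
//
//     char* statsString = VMA_NULL;
//     vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE requests the detailed map
//     // ... write statsString to a log or file ...
//     vmaFreeStatsString(allocator, statsString);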
12771 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
12773 VkResult VmaAllocator_T::Defragment(
12775 size_t allocationCount,
12776 VkBool32* pAllocationsChanged,
    if(pAllocationsChanged != VMA_NULL)
    {
        memset(pAllocationsChanged, 0, allocationCount * sizeof(VkBool32));
    }
    if(pDefragmentationStats != VMA_NULL)
    {
        memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
    }
12789 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
12791 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12793 const size_t poolCount = m_Pools.size();
12796 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12799 VMA_ASSERT(hAlloc);
12800 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
12802 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12803 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12805 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12807 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12809 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12811 const VmaPool hAllocPool = hAlloc->GetPool();
12813 if(hAllocPool != VK_NULL_HANDLE)
12816 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12818 pAllocBlockVector = &hAllocPool->m_BlockVector;
12824 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
            if(pAllocBlockVector != VMA_NULL)
            {
                VmaDefragmentator* const pDefragmentator =
                    pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pDefragmentator->AddAllocation(hAlloc, pChanged);
            }
12838 VkResult result = VK_SUCCESS;
12842 VkDeviceSize maxBytesToMove = SIZE_MAX;
12843 uint32_t maxAllocationsToMove = UINT32_MAX;
12844 if(pDefragmentationInfo != VMA_NULL)
12851 for(uint32_t memTypeIndex = 0;
12852 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
12856 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12858 result = m_pBlockVectors[memTypeIndex]->Defragment(
12859 pDefragmentationStats,
12861 maxAllocationsToMove);
12866 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12868 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12869 pDefragmentationStats,
12871 maxAllocationsToMove);
12877 for(
size_t poolIndex = poolCount; poolIndex--; )
12879 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12883 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12885 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12887 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
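// Caller-side sketch of driving this defragmentation, assuming `allocator` and an array
// `allocations` of `allocCount` VmaAllocation handles that are HOST_VISIBLE and not in use:
//
//     std::vector<VkBool32> changed(allocCount);
//     VmaDefragmentationInfo defragInfo = {};
//     defragInfo.maxBytesToMove = VK_WHOLE_SIZE;    // effectively no byte budget
//     defragInfo.maxAllocationsToMove = UINT32_MAX; // no move-count budget
//     VmaDefragmentationStats defragStats = {};
//     vmaDefragment(allocator, allocations, allocCount, changed.data(), &defragInfo, &defragStats);
//     // Buffers/images bound to allocations with changed[i] == VK_TRUE must be re-created
//     // and re-bound by the application afterwards.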
12896 if(hAllocation->CanBecomeLost())
12902 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12903 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            pAllocationInfo->offset = 0;
            pAllocationInfo->size = hAllocation->GetSize();
            pAllocationInfo->pUserData = hAllocation->GetUserData();
            return;
        }
        else if(localLastUseFrameIndex == localCurrFrameIndex)
        {
            pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
            pAllocationInfo->deviceMemory = hAllocation->GetMemory();
            pAllocationInfo->offset = hAllocation->GetOffset();
            pAllocationInfo->size = hAllocation->GetSize();
            pAllocationInfo->pUserData = hAllocation->GetUserData();
            return;
        }
12928 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12930 localLastUseFrameIndex = localCurrFrameIndex;
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12939 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12942 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12943 if(localLastUseFrameIndex == localCurrFrameIndex)
12949 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12951 localLastUseFrameIndex = localCurrFrameIndex;
    pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
    pAllocationInfo->deviceMemory = hAllocation->GetMemory();
    pAllocationInfo->offset = hAllocation->GetOffset();
    pAllocationInfo->size = hAllocation->GetSize();
    pAllocationInfo->pMappedData = hAllocation->GetMappedData();
    pAllocationInfo->pUserData = hAllocation->GetUserData();
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
12969 if(hAllocation->CanBecomeLost())
12971 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12972 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12975 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12979 else if(localLastUseFrameIndex == localCurrFrameIndex)
12985 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12987 localLastUseFrameIndex = localCurrFrameIndex;
#if VMA_STATS_STRING_ENABLED
    uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12996 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12999 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
13000 if(localLastUseFrameIndex == localCurrFrameIndex)
13006 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
13008 localLastUseFrameIndex = localCurrFrameIndex;
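// Lost-allocation protocol in short: allocations created with
// VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT must be "touched" in every frame they are used,
// otherwise their memory may be reclaimed for other allocations. A per-frame sketch,
// assuming `allocator` and such an allocation `alloc`:
//
//     vmaSetCurrentFrameIndex(allocator, frameIndex);
//     if(vmaTouchAllocation(allocator, alloc) == VK_FALSE)
//     {
//         // The allocation was lost; recreate the resource and allocate again.
//     }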
    VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);
        return VK_ERROR_INITIALIZATION_FAILED;

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}
13069 pool->m_BlockVector.GetPoolStats(pPoolStats);
13072 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
13074 m_CurrentFrameIndex.store(frameIndex);
13077 void VmaAllocator_T::MakePoolAllocationsLost(
13079 size_t* pLostAllocationCount)
13081 hPool->m_BlockVector.MakePoolAllocationsLost(
13082 m_CurrentFrameIndex.load(),
13083 pLostAllocationCount);
VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
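// Corruption detection only works when VMA_DEBUG_DETECT_CORRUPTION and a non-zero
// VMA_DEBUG_MARGIN are defined before including the implementation, and only for memory types
// that are HOST_VISIBLE. A sketch, assuming a custom pool `pool`:
//
//     VkResult res = vmaCheckPoolCorruption(allocator, pool);
//     // VK_SUCCESS                     - margins verified, no corruption found
//     // VK_ERROR_FEATURE_NOT_PRESENT   - detection is not enabled for this pool's memory type
//     // VK_ERROR_VALIDATION_FAILED_EXT - a corrupted margin was detected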
13091 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
13093 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
13096 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13098 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
13100 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
13101 VMA_ASSERT(pBlockVector);
13102 VkResult localRes = pBlockVector->CheckCorruption();
13105 case VK_ERROR_FEATURE_NOT_PRESENT:
13108 finalRes = VK_SUCCESS;
13118 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13119 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
13121 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
13123 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
13126 case VK_ERROR_FEATURE_NOT_PRESENT:
13129 finalRes = VK_SUCCESS;
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
13149 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
13152 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13154 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13155 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
13157 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
13158 if(res == VK_SUCCESS)
13160 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
13165 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
13170 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    }

    return res;
}
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
13190 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
13191 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13193 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13194 m_HeapSizeLimit[heapIndex] += size;
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}
void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
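// Public mapping sketch, assuming `allocator` and a HOST_VISIBLE allocation `alloc`:
//
//     void* mapped = VMA_NULL;
//     if(vmaMapMemory(allocator, alloc, &mapped) == VK_SUCCESS)
//     {
//         memcpy(mapped, srcData, srcSize); // srcData/srcSize are the caller's own data
//         vmaUnmapMemory(allocator, alloc);
//     }
//
// Map/unmap calls are reference-counted per block here, so multiple allocations placed in the
// same VkDeviceMemory block can be mapped concurrently.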
13246 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
13248 VkResult res = VK_SUCCESS;
13249 switch(hAllocation->GetType())
13251 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13252 res = GetVulkanFunctions().vkBindBufferMemory(
13255 hAllocation->GetMemory(),
13258 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13260 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13261 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
13262 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
13271 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
13273 VkResult res = VK_SUCCESS;
13274 switch(hAllocation->GetType())
13276 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13277 res = GetVulkanFunctions().vkBindImageMemory(
13280 hAllocation->GetMemory(),
13283 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13285 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13286 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
13287 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
13301 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
13302 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
13304 const VkDeviceSize allocationSize = hAllocation->GetSize();
13305 VMA_ASSERT(offset <= allocationSize);
13307 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13309 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13310 memRange.memory = hAllocation->GetMemory();
13312 switch(hAllocation->GetType())
13314 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13315 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13316 if(size == VK_WHOLE_SIZE)
13318 memRange.size = allocationSize - memRange.offset;
13322 VMA_ASSERT(offset + size <= allocationSize);
13323 memRange.size = VMA_MIN(
13324 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
13325 allocationSize - memRange.offset);
13329 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13332 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13333 if(size == VK_WHOLE_SIZE)
13335 size = allocationSize - offset;
13339 VMA_ASSERT(offset + size <= allocationSize);
13341 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
13344 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13345 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13346 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13347 memRange.offset += allocationOffset;
13348 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
13359 case VMA_CACHE_FLUSH:
13360 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13362 case VMA_CACHE_INVALIDATE:
13363 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
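// Worked example of the rounding above: with nonCoherentAtomSize = 64, a request to flush
// offset 100 and size 60 becomes memRange.offset = VmaAlignDown(100, 64) = 64 and
// memRange.size = VmaAlignUp(60 + (100 - 64), 64) = 128; for a block allocation both values
// are then shifted by the allocation's offset inside the block and clamped to the block size.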
void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13430 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13431 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13432 VMA_ASSERT(pDedicatedAllocVector);
13433 if(pDedicatedAllocVector->empty() ==
false)
13435 if(dedicatedAllocationsStarted ==
false)
13437 dedicatedAllocationsStarted =
true;
13438 json.WriteString(
"DedicatedAllocations");
13439 json.BeginObject();
13442 json.BeginString(
"Type ");
13443 json.ContinueString(memTypeIndex);
13448 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13450 json.BeginObject(
true);
13452 hAlloc->PrintParameters(json);
13459 if(dedicatedAllocationsStarted)
13465 bool allocationsStarted =
false;
13466 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13468 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13470 if(allocationsStarted ==
false)
13472 allocationsStarted =
true;
13473 json.WriteString(
"DefaultPools");
13474 json.BeginObject();
13477 json.BeginString(
"Type ");
13478 json.ContinueString(memTypeIndex);
13481 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13484 if(allocationsStarted)
13492 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13493 const size_t poolCount = m_Pools.size();
13496 json.WriteString(
"Pools");
13497 json.BeginObject();
13498 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13500 json.BeginString();
13501 json.ContinueString(m_Pools[poolIndex]->GetId());
13504 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
#endif // #if VMA_STATS_STRING_ENABLED

    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    return (*pAllocator)->Init(pCreateInfo);
}
13529 if(allocator != VK_NULL_HANDLE)
13531 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13532 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13533 vma_delete(&allocationCallbacks, allocator);
13539 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13541 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13542 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
13547 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13549 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13550 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
13555 uint32_t memoryTypeIndex,
13556 VkMemoryPropertyFlags* pFlags)
13558 VMA_ASSERT(allocator && pFlags);
13559 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13560 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
13565 uint32_t frameIndex)
13567 VMA_ASSERT(allocator);
13568 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13570 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13572 allocator->SetCurrentFrameIndex(frameIndex);
13579 VMA_ASSERT(allocator && pStats);
13580 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13581 allocator->CalculateStats(pStats);
#if VMA_STATS_STRING_ENABLED

void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
13591 VMA_ASSERT(allocator && ppStatsString);
13592 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13594 VmaStringBuilder sb(allocator);
13596 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13597 json.BeginObject();
13600 allocator->CalculateStats(&stats);
13602 json.WriteString(
"Total");
13603 VmaPrintStatInfo(json, stats.
total);
13605 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13607 json.BeginString(
"Heap ");
13608 json.ContinueString(heapIndex);
13610 json.BeginObject();
13612 json.WriteString(
"Size");
13613 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13615 json.WriteString(
"Flags");
13616 json.BeginArray(
true);
13617 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13619 json.WriteString(
"DEVICE_LOCAL");
13625 json.WriteString(
"Stats");
13626 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
13629 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13631 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13633 json.BeginString(
"Type ");
13634 json.ContinueString(typeIndex);
13637 json.BeginObject();
13639 json.WriteString(
"Flags");
13640 json.BeginArray(
true);
13641 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13642 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13644 json.WriteString(
"DEVICE_LOCAL");
13646 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13648 json.WriteString(
"HOST_VISIBLE");
13650 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13652 json.WriteString(
"HOST_COHERENT");
13654 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13656 json.WriteString(
"HOST_CACHED");
13658 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13660 json.WriteString(
"LAZILY_ALLOCATED");
13666 json.WriteString(
"Stats");
13667 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
13676 if(detailedMap == VK_TRUE)
13678 allocator->PrintDetailedMap(json);
13684 const size_t len = sb.GetLength();
13685 char*
const pChars = vma_new_array(allocator,
char, len + 1);
13688 memcpy(pChars, sb.GetData(), len);
13690 pChars[len] =
'\0';
13691 *ppStatsString = pChars;
void vmaFreeStatsString(VmaAllocator allocator, char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
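// Illustrative usage sketch, not part of the library source: building the JSON
// statistics string, printing it, and releasing it again. `hAllocator` is a
// hypothetical, already created VmaAllocator; the block is excluded from
// compilation on purpose.
#if 0
#include <cstdio>
static void DumpStatsJsonExample(VmaAllocator hAllocator)
{
    char* statsJson = VMA_NULL;
    // VK_TRUE additionally requests the detailed map of blocks and allocations.
    vmaBuildStatsString(hAllocator, &statsJson, VK_TRUE);
    std::printf("%s\n", statsJson);
    // The string must be released through the same allocator that built it.
    vmaFreeStatsString(hAllocator, statsJson);
}
#endif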
VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;

    // Persistently mapped allocations prefer HOST_VISIBLE memory.
    if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0)
    {
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    default:
        break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains all requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost = number of preferredFlags bits not present in this memory type.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
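// Illustrative usage sketch, not part of the library source: choosing a memory
// type for a CPU-written staging allocation. `hAllocator` is a hypothetical
// VmaAllocator and `bufMemReq` would come from vkGetBufferMemoryRequirements
// on a real buffer; the block is excluded from compilation on purpose.
#if 0
static uint32_t FindStagingMemoryTypeExample(VmaAllocator hAllocator, const VkMemoryRequirements& bufMemReq)
{
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY; // requires HOST_VISIBLE + HOST_COHERENT

    uint32_t memTypeIndex = UINT32_MAX;
    const VkResult res = vmaFindMemoryTypeIndex(
        hAllocator, bufMemReq.memoryTypeBits, &allocCreateInfo, &memTypeIndex);
    return (res == VK_SUCCESS) ? memTypeIndex : UINT32_MAX;
}
#endif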
VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);
    VMA_DEBUG_LOG("vmaCreatePool");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    VMA_DEBUG_LOG("vmaCheckPoolCorruption");
    return allocator->CheckPoolCorruption(pool);
}
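// Illustrative usage sketch, not part of the library source: creating a small
// custom pool for one memory type, querying its statistics, and destroying it.
// `hAllocator` and `memTypeIndex` are hypothetical inputs (the index could come
// from vmaFindMemoryTypeIndexForBufferInfo); excluded from compilation on purpose.
#if 0
static void CustomPoolExample(VmaAllocator hAllocator, uint32_t memTypeIndex)
{
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 16ull * 1024 * 1024; // 16 MiB per VkDeviceMemory block
    poolCreateInfo.maxBlockCount = 2;               // cap the pool at two blocks

    VmaPool pool = VK_NULL_HANDLE;
    if(vmaCreatePool(hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS)
    {
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(hAllocator, pool, &poolStats);
        // ... create buffers/images with VmaAllocationCreateInfo::pool = pool ...
        vmaDestroyPool(hAllocator, pool);
    }
}
#endif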
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
    VMA_DEBUG_LOG("vmaAllocateMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
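// Illustrative usage sketch, not part of the library source: allocating memory
// for an existing VkBuffer and binding it in two explicit steps, freeing the
// allocation if binding fails. `hAllocator` and `buffer` are hypothetical,
// valid handles; the block is excluded from compilation on purpose.
#if 0
static VkResult AllocateAndBindBufferExample(VmaAllocator hAllocator, VkBuffer buffer, VmaAllocation* pOutAlloc)
{
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkResult res = vmaAllocateMemoryForBuffer(hAllocator, buffer, &allocCreateInfo, pOutAlloc, VMA_NULL);
    if(res == VK_SUCCESS)
    {
        res = vmaBindBufferMemory(hAllocator, *pOutAlloc, buffer);
        if(res != VK_SUCCESS)
        {
            vmaFreeMemory(hAllocator, *pOutAlloc);
            *pOutAlloc = VK_NULL_HANDLE;
        }
    }
    return res;
}
#endif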
VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(allocator->GetCurrentFrameIndex(), allocation);
    }
#endif

    allocator->FreeMemory(allocation);
}

VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordResizeAllocation(allocator->GetCurrentFrameIndex(), allocation, newSize);
    }
#endif

    return allocator->ResizeAllocation(allocation, newSize);
}
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(allocator->GetCurrentFrameIndex(), allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(allocator->GetCurrentFrameIndex(), allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void* pUserData)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(allocator->GetCurrentFrameIndex(), allocation, pUserData);
    }
#endif
}
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(allocator->GetCurrentFrameIndex(), *pAllocation);
    }
#endif
}

VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(allocator->GetCurrentFrameIndex(), allocation);
    }
#endif

    return res;
}

void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(allocator->GetCurrentFrameIndex(), allocation);
    }
#endif

    allocator->Unmap(allocation);
}
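// Illustrative usage sketch, not part of the library source: uploading data
// through a temporarily mapped, HOST_VISIBLE allocation. `hAllocator` and
// `alloc` are hypothetical handles; excluded from compilation on purpose.
#if 0
static VkResult UploadToAllocationExample(VmaAllocator hAllocator, VmaAllocation alloc, const void* src, size_t size)
{
    void* mapped = VMA_NULL;
    VkResult res = vmaMapMemory(hAllocator, alloc, &mapped);
    if(res == VK_SUCCESS)
    {
        memcpy(mapped, src, size);
        vmaUnmapMemory(hAllocator, alloc); // calls to map/unmap must stay balanced
    }
    return res;
}
#endif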
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
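// Illustrative usage sketch, not part of the library source: flushing a written
// range only when the allocation's memory type is not HOST_COHERENT.
// `hAllocator` and `alloc` are hypothetical handles and `writtenBytes` is the
// size of the region just written; excluded from compilation on purpose.
#if 0
static void FlushWrittenRangeExample(VmaAllocator hAllocator, VmaAllocation alloc, VkDeviceSize writtenBytes)
{
    VmaAllocationInfo allocInfo = {};
    vmaGetAllocationInfo(hAllocator, alloc, &allocInfo);

    VkMemoryPropertyFlags memFlags = 0;
    vmaGetMemoryTypeProperties(hAllocator, allocInfo.memoryType, &memFlags);
    if((memFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
    {
        // Offset is relative to the beginning of the allocation.
        vmaFlushAllocation(hAllocator, alloc, 0, writtenBytes);
    }
}
#endif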
VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    VMA_ASSERT(allocator && pAllocations);

    VMA_DEBUG_LOG("vmaDefragment");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
}

VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, buffer);
}

VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, image);
}
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. Query memory requirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in the reported alignment.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(*pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
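// Illustrative usage sketch, not part of the library source: the common one-call
// path that creates a VkBuffer together with its backing allocation and destroys
// both. `hAllocator` is a hypothetical, already created VmaAllocator; excluded
// from compilation on purpose.
#if 0
static void CreateAndDestroyBufferExample(VmaAllocator hAllocator)
{
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    if(vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &buffer, &allocation, VMA_NULL) == VK_SUCCESS)
    {
        // ... use the buffer ...
        vmaDestroyBuffer(hAllocator, buffer, allocation); // destroys the buffer and frees its memory
    }
}
#endif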
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(allocator->GetCurrentFrameIndex(), allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(allocation);
    }
}
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            res = allocator->BindImageMemory(*pAllocation, *pImage);
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(*pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
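// Illustrative usage sketch, not part of the library source: creating a 2D
// optimal-tiling image together with its allocation and destroying both.
// `hAllocator` is a hypothetical, already created VmaAllocator; excluded from
// compilation on purpose.
#if 0
static void CreateAndDestroyImageExample(VmaAllocator hAllocator)
{
    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage image = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    if(vmaCreateImage(hAllocator, &imgCreateInfo, &allocCreateInfo, &image, &allocation, VMA_NULL) == VK_SUCCESS)
    {
        // ... use the image ...
        vmaDestroyImage(hAllocator, image, allocation); // destroys the image and frees its memory
    }
}
#endif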
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(allocator->GetCurrentFrameIndex(), allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(allocation);
    }
}
#endif // #ifdef VMA_IMPLEMENTATION