23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1659 #ifndef VMA_RECORDING_ENABLED 1661 #define VMA_RECORDING_ENABLED 1 1663 #define VMA_RECORDING_ENABLED 0 1668 #define NOMINMAX // For windows.h 1672 #include <vulkan/vulkan.h> 1675 #if VMA_RECORDING_ENABLED 1676 #include <windows.h> 1679 #if !defined(VMA_DEDICATED_ALLOCATION) 1680 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1681 #define VMA_DEDICATED_ALLOCATION 1 1683 #define VMA_DEDICATED_ALLOCATION 0 1701 uint32_t memoryType,
1702 VkDeviceMemory memory,
1707 uint32_t memoryType,
1708 VkDeviceMemory memory,
1781 #if VMA_DEDICATED_ALLOCATION 1782 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1783 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1910 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1918 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1928 uint32_t memoryTypeIndex,
1929 VkMemoryPropertyFlags* pFlags);
1941 uint32_t frameIndex);
1974 #ifndef VMA_STATS_STRING_ENABLED 1975 #define VMA_STATS_STRING_ENABLED 1 1978 #if VMA_STATS_STRING_ENABLED 1985 char** ppStatsString,
1986 VkBool32 detailedMap);
1990 char* pStatsString);
1992 #endif // #if VMA_STATS_STRING_ENABLED 2225 uint32_t memoryTypeBits,
2227 uint32_t* pMemoryTypeIndex);
2243 const VkBufferCreateInfo* pBufferCreateInfo,
2245 uint32_t* pMemoryTypeIndex);
2261 const VkImageCreateInfo* pImageCreateInfo,
2263 uint32_t* pMemoryTypeIndex);
2435 size_t* pLostAllocationCount);
2534 const VkMemoryRequirements* pVkMemoryRequirements,
2560 const VkMemoryRequirements* pVkMemoryRequirements,
2562 size_t allocationCount,
2607 size_t allocationCount,
2633 VkDeviceSize newSize);
3013 size_t allocationCount,
3014 VkBool32* pAllocationsChanged,
3080 const VkBufferCreateInfo* pBufferCreateInfo,
3105 const VkImageCreateInfo* pImageCreateInfo,
3131 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3134 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3135 #define VMA_IMPLEMENTATION 3138 #ifdef VMA_IMPLEMENTATION 3139 #undef VMA_IMPLEMENTATION 3161 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3162 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3174 #if VMA_USE_STL_CONTAINERS 3175 #define VMA_USE_STL_VECTOR 1 3176 #define VMA_USE_STL_UNORDERED_MAP 1 3177 #define VMA_USE_STL_LIST 1 3180 #ifndef VMA_USE_STL_SHARED_MUTEX 3182 #if __cplusplus >= 201703L 3183 #define VMA_USE_STL_SHARED_MUTEX 1 3187 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 3188 #define VMA_USE_STL_SHARED_MUTEX 1 3190 #define VMA_USE_STL_SHARED_MUTEX 0 3198 #if VMA_USE_STL_VECTOR 3202 #if VMA_USE_STL_UNORDERED_MAP 3203 #include <unordered_map> 3206 #if VMA_USE_STL_LIST 3215 #include <algorithm> 3220 #define VMA_NULL nullptr 3223 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3225 void *aligned_alloc(
size_t alignment,
size_t size)
3228 if(alignment <
sizeof(
void*))
3230 alignment =
sizeof(
void*);
3233 return memalign(alignment, size);
3235 #elif defined(__APPLE__) || defined(__ANDROID__) 3237 void *aligned_alloc(
size_t alignment,
size_t size)
3240 if(alignment <
sizeof(
void*))
3242 alignment =
sizeof(
void*);
3246 if(posix_memalign(&pointer, alignment, size) == 0)
3260 #define VMA_ASSERT(expr) assert(expr) 3262 #define VMA_ASSERT(expr) 3268 #ifndef VMA_HEAVY_ASSERT 3270 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3272 #define VMA_HEAVY_ASSERT(expr) 3276 #ifndef VMA_ALIGN_OF 3277 #define VMA_ALIGN_OF(type) (__alignof(type)) 3280 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3282 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3284 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3288 #ifndef VMA_SYSTEM_FREE 3290 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3292 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3297 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3301 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3305 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3309 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3312 #ifndef VMA_DEBUG_LOG 3313 #define VMA_DEBUG_LOG(format, ...) 3323 #if VMA_STATS_STRING_ENABLED 3324 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3326 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
3328 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
3330 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
3332 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
3334 snprintf(outStr, strLen,
"%p", ptr);
3342 void Lock() { m_Mutex.lock(); }
3343 void Unlock() { m_Mutex.unlock(); }
3347 #define VMA_MUTEX VmaMutex 3351 #ifndef VMA_RW_MUTEX 3352 #if VMA_USE_STL_SHARED_MUTEX 3354 #include <shared_mutex> 3358 void LockRead() { m_Mutex.lock_shared(); }
3359 void UnlockRead() { m_Mutex.unlock_shared(); }
3360 void LockWrite() { m_Mutex.lock(); }
3361 void UnlockWrite() { m_Mutex.unlock(); }
3363 std::shared_mutex m_Mutex;
3365 #define VMA_RW_MUTEX VmaRWMutex 3366 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 3372 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3373 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3374 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3375 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3376 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3380 #define VMA_RW_MUTEX VmaRWMutex 3386 void LockRead() { m_Mutex.Lock(); }
3387 void UnlockRead() { m_Mutex.Unlock(); }
3388 void LockWrite() { m_Mutex.Lock(); }
3389 void UnlockWrite() { m_Mutex.Unlock(); }
3393 #define VMA_RW_MUTEX VmaRWMutex 3394 #endif // #if VMA_USE_STL_SHARED_MUTEX 3395 #endif // #ifndef VMA_RW_MUTEX 3405 #ifndef VMA_ATOMIC_UINT32 3407 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3410 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3415 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3418 #ifndef VMA_DEBUG_ALIGNMENT 3423 #define VMA_DEBUG_ALIGNMENT (1) 3426 #ifndef VMA_DEBUG_MARGIN 3431 #define VMA_DEBUG_MARGIN (0) 3434 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3439 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3442 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3448 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3451 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3456 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3459 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3464 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3467 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3468 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3472 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3473 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3477 #ifndef VMA_CLASS_NO_COPY 3478 #define VMA_CLASS_NO_COPY(className) \ 3480 className(const className&) = delete; \ 3481 className& operator=(const className&) = delete; 3484 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3487 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3489 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3490 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3496 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3498 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3499 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count), using the
// classic parallel bit-counting algorithm.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c; // Restored: the final return was dropped in the mangled source.
}
// Aligns given value up to the nearest multiple of align.
// align must be non-zero; T should be an unsigned integer type.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns given value down to the nearest multiple of align.
// align must be non-zero; T should be an unsigned integer type.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}
// Division with mathematical rounding to nearest integer (round half up).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if x is a power of two.
// Note: by this formula x == 0 also yields true; callers rely on that quirk,
// so it is preserved.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Returns the smallest power of 2 greater than or equal to v.
// For v == 0 the result is 0. Body restored - it was dropped in the mangled source.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
// 64-bit overload: smallest power of 2 greater than or equal to v.
// For v == 0 the result is 0. Body restored - it was dropped in the mangled source.
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}
// Returns the largest power of 2 less than or equal to v.
// For v == 0 the result is 0. Body restored - it was dropped in the mangled source.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
// 64-bit overload: largest power of 2 less than or equal to v.
// For v == 0 the result is 0. Body restored - it was dropped in the mangled source.
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
3593 static inline bool VmaStrIsEmpty(
const char* pStr)
3595 return pStr == VMA_NULL || *pStr ==
'\0';
3598 #if VMA_STATS_STRING_ENABLED 3600 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3616 #endif // #if VMA_STATS_STRING_ENABLED 3620 template<
typename Iterator,
typename Compare>
3621 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3623 Iterator centerValue = end; --centerValue;
3624 Iterator insertIndex = beg;
3625 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3627 if(cmp(*memTypeIndex, *centerValue))
3629 if(insertIndex != memTypeIndex)
3631 VMA_SWAP(*memTypeIndex, *insertIndex);
3636 if(insertIndex != centerValue)
3638 VMA_SWAP(*insertIndex, *centerValue);
3643 template<
typename Iterator,
typename Compare>
3644 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3648 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3649 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3650 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3654 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3656 #endif // #ifndef VMA_SORT 3665 static inline bool VmaBlocksOnSamePage(
3666 VkDeviceSize resourceAOffset,
3667 VkDeviceSize resourceASize,
3668 VkDeviceSize resourceBOffset,
3669 VkDeviceSize pageSize)
3671 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3672 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3673 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3674 VkDeviceSize resourceBStart = resourceBOffset;
3675 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3676 return resourceAEndPage == resourceBStartPage;
// Type of a suballocation inside a memory block. The ordering of the values
// matters: VmaIsBufferImageGranularityConflict sorts its two arguments by it.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3696 static inline bool VmaIsBufferImageGranularityConflict(
3697 VmaSuballocationType suballocType1,
3698 VmaSuballocationType suballocType2)
3700 if(suballocType1 > suballocType2)
3702 VMA_SWAP(suballocType1, suballocType2);
3705 switch(suballocType1)
3707 case VMA_SUBALLOCATION_TYPE_FREE:
3709 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3711 case VMA_SUBALLOCATION_TYPE_BUFFER:
3713 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3714 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3715 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3717 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3718 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3719 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3720 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3722 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3723 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3731 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3733 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3734 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3735 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3736 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3738 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3745 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3747 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3748 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3749 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3750 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3752 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3765 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
3767 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
3768 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
3769 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3770 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
3776 VMA_CLASS_NO_COPY(VmaMutexLock)
3778 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3779 m_pMutex(useMutex ? &mutex : VMA_NULL)
3780 {
if(m_pMutex) { m_pMutex->Lock(); } }
3782 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3784 VMA_MUTEX* m_pMutex;
3788 struct VmaMutexLockRead
3790 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3792 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3793 m_pMutex(useMutex ? &mutex : VMA_NULL)
3794 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3795 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3797 VMA_RW_MUTEX* m_pMutex;
3801 struct VmaMutexLockWrite
3803 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3805 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3806 m_pMutex(useMutex ? &mutex : VMA_NULL)
3807 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3808 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3810 VMA_RW_MUTEX* m_pMutex;
3813 #if VMA_DEBUG_GLOBAL_MUTEX 3814 static VMA_MUTEX gDebugGlobalMutex;
3815 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3817 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3821 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over a sorted [beg, end) range. Returns an iterator to the first
element that is NOT less than key (like std::lower_bound).
cmp(a, b) must return true when a is less than b.
The while-loop scaffolding and final return were restored - they were dropped
in the mangled source.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3851 template<
typename CmpLess,
typename IterT,
typename KeyT>
3852 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
3854 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3855 beg, end, value, cmp);
3857 (!cmp(*it, value) && !cmp(value, *it)))
3869 template<
typename T>
3870 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
3872 for(uint32_t i = 0; i < count; ++i)
3874 const T iPtr = arr[i];
3875 if(iPtr == VMA_NULL)
3879 for(uint32_t j = i + 1; j < count; ++j)
3893 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3895 if((pAllocationCallbacks != VMA_NULL) &&
3896 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3898 return (*pAllocationCallbacks->pfnAllocation)(
3899 pAllocationCallbacks->pUserData,
3902 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3906 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3910 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3912 if((pAllocationCallbacks != VMA_NULL) &&
3913 (pAllocationCallbacks->pfnFree != VMA_NULL))
3915 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3919 VMA_SYSTEM_FREE(ptr);
3923 template<
typename T>
3924 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3926 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3929 template<
typename T>
3930 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3932 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3935 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3937 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3939 template<
typename T>
3940 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3943 VmaFree(pAllocationCallbacks, ptr);
3946 template<
typename T>
3947 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3951 for(
size_t i = count; i--; )
3955 VmaFree(pAllocationCallbacks, ptr);
3960 template<
typename T>
3961 class VmaStlAllocator
3964 const VkAllocationCallbacks*
const m_pCallbacks;
3965 typedef T value_type;
3967 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3968 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3970 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3971 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3973 template<
typename U>
3974 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3976 return m_pCallbacks == rhs.m_pCallbacks;
3978 template<
typename U>
3979 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3981 return m_pCallbacks != rhs.m_pCallbacks;
3984 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3987 #if VMA_USE_STL_VECTOR 3989 #define VmaVector std::vector 3991 template<
typename T,
typename allocatorT>
3992 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3994 vec.insert(vec.begin() + index, item);
// Removes the element at the given index from a std::vector, keeping element order.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
4003 #else // #if VMA_USE_STL_VECTOR 4008 template<
typename T,
typename AllocatorT>
4012 typedef T value_type;
4014 VmaVector(
const AllocatorT& allocator) :
4015 m_Allocator(allocator),
4022 VmaVector(
size_t count,
const AllocatorT& allocator) :
4023 m_Allocator(allocator),
4024 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
4030 VmaVector(
const VmaVector<T, AllocatorT>& src) :
4031 m_Allocator(src.m_Allocator),
4032 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4033 m_Count(src.m_Count),
4034 m_Capacity(src.m_Count)
4038 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
4044 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4047 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4051 resize(rhs.m_Count);
4054 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4060 bool empty()
const {
return m_Count == 0; }
4061 size_t size()
const {
return m_Count; }
4062 T* data() {
return m_pArray; }
4063 const T* data()
const {
return m_pArray; }
4065 T& operator[](
size_t index)
4067 VMA_HEAVY_ASSERT(index < m_Count);
4068 return m_pArray[index];
4070 const T& operator[](
size_t index)
const 4072 VMA_HEAVY_ASSERT(index < m_Count);
4073 return m_pArray[index];
4078 VMA_HEAVY_ASSERT(m_Count > 0);
4081 const T& front()
const 4083 VMA_HEAVY_ASSERT(m_Count > 0);
4088 VMA_HEAVY_ASSERT(m_Count > 0);
4089 return m_pArray[m_Count - 1];
4091 const T& back()
const 4093 VMA_HEAVY_ASSERT(m_Count > 0);
4094 return m_pArray[m_Count - 1];
4097 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4099 newCapacity = VMA_MAX(newCapacity, m_Count);
4101 if((newCapacity < m_Capacity) && !freeMemory)
4103 newCapacity = m_Capacity;
4106 if(newCapacity != m_Capacity)
4108 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4111 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4113 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4114 m_Capacity = newCapacity;
4115 m_pArray = newArray;
4119 void resize(
size_t newCount,
bool freeMemory =
false)
4121 size_t newCapacity = m_Capacity;
4122 if(newCount > m_Capacity)
4124 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4128 newCapacity = newCount;
4131 if(newCapacity != m_Capacity)
4133 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4134 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4135 if(elementsToCopy != 0)
4137 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4139 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4140 m_Capacity = newCapacity;
4141 m_pArray = newArray;
4147 void clear(
bool freeMemory =
false)
4149 resize(0, freeMemory);
4152 void insert(
size_t index,
const T& src)
4154 VMA_HEAVY_ASSERT(index <= m_Count);
4155 const size_t oldCount = size();
4156 resize(oldCount + 1);
4157 if(index < oldCount)
4159 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4161 m_pArray[index] = src;
4164 void remove(
size_t index)
4166 VMA_HEAVY_ASSERT(index < m_Count);
4167 const size_t oldCount = size();
4168 if(index < oldCount - 1)
4170 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4172 resize(oldCount - 1);
4175 void push_back(
const T& src)
4177 const size_t newIndex = size();
4178 resize(newIndex + 1);
4179 m_pArray[newIndex] = src;
4184 VMA_HEAVY_ASSERT(m_Count > 0);
4188 void push_front(
const T& src)
4195 VMA_HEAVY_ASSERT(m_Count > 0);
4199 typedef T* iterator;
4201 iterator begin() {
return m_pArray; }
4202 iterator end() {
return m_pArray + m_Count; }
4205 AllocatorT m_Allocator;
4211 template<
typename T,
typename allocatorT>
4212 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4214 vec.insert(index, item);
4217 template<
typename T,
typename allocatorT>
4218 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4223 #endif // #if VMA_USE_STL_VECTOR 4225 template<
typename CmpLess,
typename VectorT>
4226 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4228 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4230 vector.data() + vector.size(),
4232 CmpLess()) - vector.data();
4233 VmaVectorInsert(vector, indexToInsert, value);
4234 return indexToInsert;
// Removes the element cmp-equivalent to value from a sorted vector.
// Returns true if such an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
4263 template<
typename T>
4264 class VmaPoolAllocator
4266 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4268 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4269 ~VmaPoolAllocator();
4277 uint32_t NextFreeIndex;
4285 uint32_t FirstFreeIndex;
4288 const VkAllocationCallbacks* m_pAllocationCallbacks;
4289 const uint32_t m_FirstBlockCapacity;
4290 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4292 ItemBlock& CreateNewBlock();
4295 template<
typename T>
4296 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4297 m_pAllocationCallbacks(pAllocationCallbacks),
4298 m_FirstBlockCapacity(firstBlockCapacity),
4299 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4301 VMA_ASSERT(m_FirstBlockCapacity > 1);
4304 template<
typename T>
4305 VmaPoolAllocator<T>::~VmaPoolAllocator()
4310 template<
typename T>
4311 void VmaPoolAllocator<T>::Clear()
4313 for(
size_t i = m_ItemBlocks.size(); i--; )
4314 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4315 m_ItemBlocks.clear();
4318 template<
typename T>
4319 T* VmaPoolAllocator<T>::Alloc()
4321 for(
size_t i = m_ItemBlocks.size(); i--; )
4323 ItemBlock& block = m_ItemBlocks[i];
4325 if(block.FirstFreeIndex != UINT32_MAX)
4327 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4328 block.FirstFreeIndex = pItem->NextFreeIndex;
4329 return &pItem->Value;
4334 ItemBlock& newBlock = CreateNewBlock();
4335 Item*
const pItem = &newBlock.pItems[0];
4336 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4337 return &pItem->Value;
4340 template<
typename T>
4341 void VmaPoolAllocator<T>::Free(T* ptr)
4344 for(
size_t i = m_ItemBlocks.size(); i--; )
4346 ItemBlock& block = m_ItemBlocks[i];
4350 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4353 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4355 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4356 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4357 block.FirstFreeIndex = index;
4361 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4364 template<
typename T>
4365 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4367 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4368 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4370 const ItemBlock newBlock = {
4371 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4375 m_ItemBlocks.push_back(newBlock);
4378 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4379 newBlock.pItems[i].NextFreeIndex = i + 1;
4380 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4381 return m_ItemBlocks.back();
4387 #if VMA_USE_STL_LIST 4389 #define VmaList std::list 4391 #else // #if VMA_USE_STL_LIST 4393 template<
typename T>
4402 template<
typename T>
4405 VMA_CLASS_NO_COPY(VmaRawList)
4407 typedef VmaListItem<T> ItemType;
4409 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4413 size_t GetCount()
const {
return m_Count; }
4414 bool IsEmpty()
const {
return m_Count == 0; }
4416 ItemType* Front() {
return m_pFront; }
4417 const ItemType* Front()
const {
return m_pFront; }
4418 ItemType* Back() {
return m_pBack; }
4419 const ItemType* Back()
const {
return m_pBack; }
4421 ItemType* PushBack();
4422 ItemType* PushFront();
4423 ItemType* PushBack(
const T& value);
4424 ItemType* PushFront(
const T& value);
4429 ItemType* InsertBefore(ItemType* pItem);
4431 ItemType* InsertAfter(ItemType* pItem);
4433 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4434 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4436 void Remove(ItemType* pItem);
4439 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4440 VmaPoolAllocator<ItemType> m_ItemAllocator;
4446 template<
typename T>
4447 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4448 m_pAllocationCallbacks(pAllocationCallbacks),
4449 m_ItemAllocator(pAllocationCallbacks, 128),
4456 template<
typename T>
4457 VmaRawList<T>::~VmaRawList()
4463 template<
typename T>
4464 void VmaRawList<T>::Clear()
4466 if(IsEmpty() ==
false)
4468 ItemType* pItem = m_pBack;
4469 while(pItem != VMA_NULL)
4471 ItemType*
const pPrevItem = pItem->pPrev;
4472 m_ItemAllocator.Free(pItem);
4475 m_pFront = VMA_NULL;
4481 template<
typename T>
4482 VmaListItem<T>* VmaRawList<T>::PushBack()
4484 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4485 pNewItem->pNext = VMA_NULL;
4488 pNewItem->pPrev = VMA_NULL;
4489 m_pFront = pNewItem;
4495 pNewItem->pPrev = m_pBack;
4496 m_pBack->pNext = pNewItem;
4503 template<
typename T>
4504 VmaListItem<T>* VmaRawList<T>::PushFront()
4506 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4507 pNewItem->pPrev = VMA_NULL;
4510 pNewItem->pNext = VMA_NULL;
4511 m_pFront = pNewItem;
4517 pNewItem->pNext = m_pFront;
4518 m_pFront->pPrev = pNewItem;
4519 m_pFront = pNewItem;
4525 template<
typename T>
4526 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4528 ItemType*
const pNewItem = PushBack();
4529 pNewItem->Value = value;
4533 template<
typename T>
4534 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4536 ItemType*
const pNewItem = PushFront();
4537 pNewItem->Value = value;
4541 template<
typename T>
4542 void VmaRawList<T>::PopBack()
4544 VMA_HEAVY_ASSERT(m_Count > 0);
4545 ItemType*
const pBackItem = m_pBack;
4546 ItemType*
const pPrevItem = pBackItem->pPrev;
4547 if(pPrevItem != VMA_NULL)
4549 pPrevItem->pNext = VMA_NULL;
4551 m_pBack = pPrevItem;
4552 m_ItemAllocator.Free(pBackItem);
4556 template<
typename T>
4557 void VmaRawList<T>::PopFront()
4559 VMA_HEAVY_ASSERT(m_Count > 0);
4560 ItemType*
const pFrontItem = m_pFront;
4561 ItemType*
const pNextItem = pFrontItem->pNext;
4562 if(pNextItem != VMA_NULL)
4564 pNextItem->pPrev = VMA_NULL;
4566 m_pFront = pNextItem;
4567 m_ItemAllocator.Free(pFrontItem);
4571 template<
typename T>
4572 void VmaRawList<T>::Remove(ItemType* pItem)
4574 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4575 VMA_HEAVY_ASSERT(m_Count > 0);
4577 if(pItem->pPrev != VMA_NULL)
4579 pItem->pPrev->pNext = pItem->pNext;
4583 VMA_HEAVY_ASSERT(m_pFront == pItem);
4584 m_pFront = pItem->pNext;
4587 if(pItem->pNext != VMA_NULL)
4589 pItem->pNext->pPrev = pItem->pPrev;
4593 VMA_HEAVY_ASSERT(m_pBack == pItem);
4594 m_pBack = pItem->pPrev;
4597 m_ItemAllocator.Free(pItem);
4601 template<
typename T>
4602 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4604 if(pItem != VMA_NULL)
4606 ItemType*
const prevItem = pItem->pPrev;
4607 ItemType*
const newItem = m_ItemAllocator.Alloc();
4608 newItem->pPrev = prevItem;
4609 newItem->pNext = pItem;
4610 pItem->pPrev = newItem;
4611 if(prevItem != VMA_NULL)
4613 prevItem->pNext = newItem;
4617 VMA_HEAVY_ASSERT(m_pFront == pItem);
4627 template<
typename T>
4628 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4630 if(pItem != VMA_NULL)
4632 ItemType*
const nextItem = pItem->pNext;
4633 ItemType*
const newItem = m_ItemAllocator.Alloc();
4634 newItem->pNext = nextItem;
4635 newItem->pPrev = pItem;
4636 pItem->pNext = newItem;
4637 if(nextItem != VMA_NULL)
4639 nextItem->pPrev = newItem;
4643 VMA_HEAVY_ASSERT(m_pBack == pItem);
4653 template<
typename T>
4654 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4656 ItemType*
const newItem = InsertBefore(pItem);
4657 newItem->Value = value;
4661 template<
typename T>
4662 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4664 ItemType*
const newItem = InsertAfter(pItem);
4665 newItem->Value = value;
4669 template<
typename T,
typename AllocatorT>
4672 VMA_CLASS_NO_COPY(VmaList)
4683 T& operator*()
const 4685 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4686 return m_pItem->Value;
4688 T* operator->()
const 4690 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4691 return &m_pItem->Value;
4694 iterator& operator++()
4696 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4697 m_pItem = m_pItem->pNext;
4700 iterator& operator--()
4702 if(m_pItem != VMA_NULL)
4704 m_pItem = m_pItem->pPrev;
4708 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4709 m_pItem = m_pList->Back();
4714 iterator operator++(
int)
4716 iterator result = *
this;
4720 iterator operator--(
int)
4722 iterator result = *
this;
4727 bool operator==(
const iterator& rhs)
const 4729 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4730 return m_pItem == rhs.m_pItem;
4732 bool operator!=(
const iterator& rhs)
const 4734 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4735 return m_pItem != rhs.m_pItem;
4739 VmaRawList<T>* m_pList;
4740 VmaListItem<T>* m_pItem;
4742 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4748 friend class VmaList<T, AllocatorT>;
4751 class const_iterator
4760 const_iterator(
const iterator& src) :
4761 m_pList(src.m_pList),
4762 m_pItem(src.m_pItem)
4766 const T& operator*()
const 4768 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4769 return m_pItem->Value;
4771 const T* operator->()
const 4773 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4774 return &m_pItem->Value;
4777 const_iterator& operator++()
4779 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4780 m_pItem = m_pItem->pNext;
4783 const_iterator& operator--()
4785 if(m_pItem != VMA_NULL)
4787 m_pItem = m_pItem->pPrev;
4791 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4792 m_pItem = m_pList->Back();
4797 const_iterator operator++(
int)
4799 const_iterator result = *
this;
4803 const_iterator operator--(
int)
4805 const_iterator result = *
this;
4810 bool operator==(
const const_iterator& rhs)
const 4812 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4813 return m_pItem == rhs.m_pItem;
4815 bool operator!=(
const const_iterator& rhs)
const 4817 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4818 return m_pItem != rhs.m_pItem;
4822 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4828 const VmaRawList<T>* m_pList;
4829 const VmaListItem<T>* m_pItem;
4831 friend class VmaList<T, AllocatorT>;
4834 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4836 bool empty()
const {
return m_RawList.IsEmpty(); }
4837 size_t size()
const {
return m_RawList.GetCount(); }
4839 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4840 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4842 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4843 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4845 void clear() { m_RawList.Clear(); }
4846 void push_back(
const T& value) { m_RawList.PushBack(value); }
4847 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4848 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4851 VmaRawList<T> m_RawList;
4854 #endif // #if VMA_USE_STL_LIST 4862 #if VMA_USE_STL_UNORDERED_MAP 4864 #define VmaPair std::pair 4866 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4867 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4869 #else // #if VMA_USE_STL_UNORDERED_MAP 4871 template<
typename T1,
typename T2>
4877 VmaPair() : first(), second() { }
4878 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4884 template<
typename KeyT,
typename ValueT>
4888 typedef VmaPair<KeyT, ValueT> PairType;
4889 typedef PairType* iterator;
4891 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4893 iterator begin() {
return m_Vector.begin(); }
4894 iterator end() {
return m_Vector.end(); }
4896 void insert(
const PairType& pair);
4897 iterator find(
const KeyT& key);
4898 void erase(iterator it);
4901 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4904 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4906 template<
typename FirstT,
typename SecondT>
4907 struct VmaPairFirstLess
4909 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4911 return lhs.first < rhs.first;
4913 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4915 return lhs.first < rhsFirst;
4919 template<
typename KeyT,
typename ValueT>
4920 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4922 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4924 m_Vector.data() + m_Vector.size(),
4926 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4927 VmaVectorInsert(m_Vector, indexToInsert, pair);
4930 template<
typename KeyT,
typename ValueT>
4931 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4933 PairType* it = VmaBinaryFindFirstNotLess(
4935 m_Vector.data() + m_Vector.size(),
4937 VmaPairFirstLess<KeyT, ValueT>());
4938 if((it != m_Vector.end()) && (it->first == key))
4944 return m_Vector.end();
4948 template<
typename KeyT,
typename ValueT>
4949 void VmaMap<KeyT, ValueT>::erase(iterator it)
4951 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4954 #endif // #if VMA_USE_STL_UNORDERED_MAP 4960 class VmaDeviceMemoryBlock;
4962 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4964 struct VmaAllocation_T
4967 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4971 FLAG_USER_DATA_STRING = 0x01,
4975 enum ALLOCATION_TYPE
4977 ALLOCATION_TYPE_NONE,
4978 ALLOCATION_TYPE_BLOCK,
4979 ALLOCATION_TYPE_DEDICATED,
4987 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
4991 m_pUserData = VMA_NULL;
4992 m_LastUseFrameIndex = currentFrameIndex;
4993 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
4994 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
4996 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
4998 #if VMA_STATS_STRING_ENABLED 4999 m_CreationFrameIndex = currentFrameIndex;
5000 m_BufferImageUsage = 0;
5006 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
5009 VMA_ASSERT(m_pUserData == VMA_NULL);
5012 void InitBlockAllocation(
5013 VmaDeviceMemoryBlock* block,
5014 VkDeviceSize offset,
5015 VkDeviceSize alignment,
5017 VmaSuballocationType suballocationType,
5021 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5022 VMA_ASSERT(block != VMA_NULL);
5023 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5024 m_Alignment = alignment;
5026 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5027 m_SuballocationType = (uint8_t)suballocationType;
5028 m_BlockAllocation.m_Block = block;
5029 m_BlockAllocation.m_Offset = offset;
5030 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
5035 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5036 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
5037 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5038 m_BlockAllocation.m_Block = VMA_NULL;
5039 m_BlockAllocation.m_Offset = 0;
5040 m_BlockAllocation.m_CanBecomeLost =
true;
5043 void ChangeBlockAllocation(
5045 VmaDeviceMemoryBlock* block,
5046 VkDeviceSize offset);
5048 void ChangeSize(VkDeviceSize newSize);
5049 void ChangeOffset(VkDeviceSize newOffset);
5052 void InitDedicatedAllocation(
5053 uint32_t memoryTypeIndex,
5054 VkDeviceMemory hMemory,
5055 VmaSuballocationType suballocationType,
5059 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5060 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5061 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5064 m_SuballocationType = (uint8_t)suballocationType;
5065 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5066 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5067 m_DedicatedAllocation.m_hMemory = hMemory;
5068 m_DedicatedAllocation.m_pMappedData = pMappedData;
5071 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5072 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5073 VkDeviceSize GetSize()
const {
return m_Size; }
5074 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5075 void* GetUserData()
const {
return m_pUserData; }
5076 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5077 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
5079 VmaDeviceMemoryBlock* GetBlock()
const 5081 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5082 return m_BlockAllocation.m_Block;
5084 VkDeviceSize GetOffset()
const;
5085 VkDeviceMemory GetMemory()
const;
5086 uint32_t GetMemoryTypeIndex()
const;
5087 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5088 void* GetMappedData()
const;
5089 bool CanBecomeLost()
const;
5091 uint32_t GetLastUseFrameIndex()
const 5093 return m_LastUseFrameIndex.load();
5095 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5097 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5107 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5109 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5111 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
5122 void BlockAllocMap();
5123 void BlockAllocUnmap();
5124 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
5127 #if VMA_STATS_STRING_ENABLED 5128 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5129 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5131 void InitBufferImageUsage(uint32_t bufferImageUsage)
5133 VMA_ASSERT(m_BufferImageUsage == 0);
5134 m_BufferImageUsage = bufferImageUsage;
5137 void PrintParameters(
class VmaJsonWriter& json)
const;
5141 VkDeviceSize m_Alignment;
5142 VkDeviceSize m_Size;
5144 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5146 uint8_t m_SuballocationType;
5153 struct BlockAllocation
5155 VmaDeviceMemoryBlock* m_Block;
5156 VkDeviceSize m_Offset;
5157 bool m_CanBecomeLost;
5161 struct DedicatedAllocation
5163 uint32_t m_MemoryTypeIndex;
5164 VkDeviceMemory m_hMemory;
5165 void* m_pMappedData;
5171 BlockAllocation m_BlockAllocation;
5173 DedicatedAllocation m_DedicatedAllocation;
5176 #if VMA_STATS_STRING_ENABLED 5177 uint32_t m_CreationFrameIndex;
5178 uint32_t m_BufferImageUsage;
5188 struct VmaSuballocation
5190 VkDeviceSize offset;
5193 VmaSuballocationType type;
5197 struct VmaSuballocationOffsetLess
5199 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5201 return lhs.offset < rhs.offset;
5204 struct VmaSuballocationOffsetGreater
5206 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5208 return lhs.offset > rhs.offset;
5212 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5215 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5217 enum class VmaAllocationRequestType
5239 struct VmaAllocationRequest
5241 VkDeviceSize offset;
5242 VkDeviceSize sumFreeSize;
5243 VkDeviceSize sumItemSize;
5244 VmaSuballocationList::iterator item;
5245 size_t itemsToMakeLostCount;
5247 VmaAllocationRequestType type;
5249 VkDeviceSize CalcCost()
const 5251 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
5259 class VmaBlockMetadata
5263 virtual ~VmaBlockMetadata() { }
5264 virtual void Init(VkDeviceSize size) { m_Size = size; }
5267 virtual bool Validate()
const = 0;
5268 VkDeviceSize GetSize()
const {
return m_Size; }
5269 virtual size_t GetAllocationCount()
const = 0;
5270 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5271 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5273 virtual bool IsEmpty()
const = 0;
5275 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5277 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5279 #if VMA_STATS_STRING_ENABLED 5280 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
5286 virtual bool CreateAllocationRequest(
5287 uint32_t currentFrameIndex,
5288 uint32_t frameInUseCount,
5289 VkDeviceSize bufferImageGranularity,
5290 VkDeviceSize allocSize,
5291 VkDeviceSize allocAlignment,
5293 VmaSuballocationType allocType,
5294 bool canMakeOtherLost,
5297 VmaAllocationRequest* pAllocationRequest) = 0;
5299 virtual bool MakeRequestedAllocationsLost(
5300 uint32_t currentFrameIndex,
5301 uint32_t frameInUseCount,
5302 VmaAllocationRequest* pAllocationRequest) = 0;
5304 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5306 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
5310 const VmaAllocationRequest& request,
5311 VmaSuballocationType type,
5312 VkDeviceSize allocSize,
5317 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5320 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5323 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
5325 #if VMA_STATS_STRING_ENABLED 5326 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5327 VkDeviceSize unusedBytes,
5328 size_t allocationCount,
5329 size_t unusedRangeCount)
const;
5330 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5331 VkDeviceSize offset,
5333 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5334 VkDeviceSize offset,
5335 VkDeviceSize size)
const;
5336 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5340 VkDeviceSize m_Size;
5341 const VkAllocationCallbacks* m_pAllocationCallbacks;
5344 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5345 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5349 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5351 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5354 virtual ~VmaBlockMetadata_Generic();
5355 virtual void Init(VkDeviceSize size);
5357 virtual bool Validate()
const;
5358 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5359 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5360 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5361 virtual bool IsEmpty()
const;
5363 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5364 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5366 #if VMA_STATS_STRING_ENABLED 5367 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5370 virtual bool CreateAllocationRequest(
5371 uint32_t currentFrameIndex,
5372 uint32_t frameInUseCount,
5373 VkDeviceSize bufferImageGranularity,
5374 VkDeviceSize allocSize,
5375 VkDeviceSize allocAlignment,
5377 VmaSuballocationType allocType,
5378 bool canMakeOtherLost,
5380 VmaAllocationRequest* pAllocationRequest);
5382 virtual bool MakeRequestedAllocationsLost(
5383 uint32_t currentFrameIndex,
5384 uint32_t frameInUseCount,
5385 VmaAllocationRequest* pAllocationRequest);
5387 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5389 virtual VkResult CheckCorruption(
const void* pBlockData);
5392 const VmaAllocationRequest& request,
5393 VmaSuballocationType type,
5394 VkDeviceSize allocSize,
5398 virtual void FreeAtOffset(VkDeviceSize offset);
5400 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5405 bool IsBufferImageGranularityConflictPossible(
5406 VkDeviceSize bufferImageGranularity,
5407 VmaSuballocationType& inOutPrevSuballocType)
const;
5410 friend class VmaDefragmentationAlgorithm_Generic;
5411 friend class VmaDefragmentationAlgorithm_Fast;
5413 uint32_t m_FreeCount;
5414 VkDeviceSize m_SumFreeSize;
5415 VmaSuballocationList m_Suballocations;
5418 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5420 bool ValidateFreeSuballocationList()
const;
5424 bool CheckAllocation(
5425 uint32_t currentFrameIndex,
5426 uint32_t frameInUseCount,
5427 VkDeviceSize bufferImageGranularity,
5428 VkDeviceSize allocSize,
5429 VkDeviceSize allocAlignment,
5430 VmaSuballocationType allocType,
5431 VmaSuballocationList::const_iterator suballocItem,
5432 bool canMakeOtherLost,
5433 VkDeviceSize* pOffset,
5434 size_t* itemsToMakeLostCount,
5435 VkDeviceSize* pSumFreeSize,
5436 VkDeviceSize* pSumItemSize)
const;
5438 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5442 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5445 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5448 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5529 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5531 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5534 virtual ~VmaBlockMetadata_Linear();
5535 virtual void Init(VkDeviceSize size);
5537 virtual bool Validate()
const;
5538 virtual size_t GetAllocationCount()
const;
5539 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5540 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5541 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5543 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5544 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5546 #if VMA_STATS_STRING_ENABLED 5547 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5550 virtual bool CreateAllocationRequest(
5551 uint32_t currentFrameIndex,
5552 uint32_t frameInUseCount,
5553 VkDeviceSize bufferImageGranularity,
5554 VkDeviceSize allocSize,
5555 VkDeviceSize allocAlignment,
5557 VmaSuballocationType allocType,
5558 bool canMakeOtherLost,
5560 VmaAllocationRequest* pAllocationRequest);
5562 virtual bool MakeRequestedAllocationsLost(
5563 uint32_t currentFrameIndex,
5564 uint32_t frameInUseCount,
5565 VmaAllocationRequest* pAllocationRequest);
5567 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5569 virtual VkResult CheckCorruption(
const void* pBlockData);
5572 const VmaAllocationRequest& request,
5573 VmaSuballocationType type,
5574 VkDeviceSize allocSize,
5578 virtual void FreeAtOffset(VkDeviceSize offset);
5588 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5590 enum SECOND_VECTOR_MODE
5592 SECOND_VECTOR_EMPTY,
5597 SECOND_VECTOR_RING_BUFFER,
5603 SECOND_VECTOR_DOUBLE_STACK,
5606 VkDeviceSize m_SumFreeSize;
5607 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5608 uint32_t m_1stVectorIndex;
5609 SECOND_VECTOR_MODE m_2ndVectorMode;
5611 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5612 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5613 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5614 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5617 size_t m_1stNullItemsBeginCount;
5619 size_t m_1stNullItemsMiddleCount;
5621 size_t m_2ndNullItemsCount;
5623 bool ShouldCompact1st()
const;
5624 void CleanupAfterFree();
5626 bool CreateAllocationRequest_LowerAddress(
5627 uint32_t currentFrameIndex,
5628 uint32_t frameInUseCount,
5629 VkDeviceSize bufferImageGranularity,
5630 VkDeviceSize allocSize,
5631 VkDeviceSize allocAlignment,
5632 VmaSuballocationType allocType,
5633 bool canMakeOtherLost,
5635 VmaAllocationRequest* pAllocationRequest);
5636 bool CreateAllocationRequest_UpperAddress(
5637 uint32_t currentFrameIndex,
5638 uint32_t frameInUseCount,
5639 VkDeviceSize bufferImageGranularity,
5640 VkDeviceSize allocSize,
5641 VkDeviceSize allocAlignment,
5642 VmaSuballocationType allocType,
5643 bool canMakeOtherLost,
5645 VmaAllocationRequest* pAllocationRequest);
5659 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5661 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5664 virtual ~VmaBlockMetadata_Buddy();
5665 virtual void Init(VkDeviceSize size);
5667 virtual bool Validate()
const;
5668 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5669 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5670 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5671 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5673 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5674 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5676 #if VMA_STATS_STRING_ENABLED 5677 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5680 virtual bool CreateAllocationRequest(
5681 uint32_t currentFrameIndex,
5682 uint32_t frameInUseCount,
5683 VkDeviceSize bufferImageGranularity,
5684 VkDeviceSize allocSize,
5685 VkDeviceSize allocAlignment,
5687 VmaSuballocationType allocType,
5688 bool canMakeOtherLost,
5690 VmaAllocationRequest* pAllocationRequest);
5692 virtual bool MakeRequestedAllocationsLost(
5693 uint32_t currentFrameIndex,
5694 uint32_t frameInUseCount,
5695 VmaAllocationRequest* pAllocationRequest);
5697 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5699 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5702 const VmaAllocationRequest& request,
5703 VmaSuballocationType type,
5704 VkDeviceSize allocSize,
5707 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5708 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5711 static const VkDeviceSize MIN_NODE_SIZE = 32;
5712 static const size_t MAX_LEVELS = 30;
5714 struct ValidationContext
5716 size_t calculatedAllocationCount;
5717 size_t calculatedFreeCount;
5718 VkDeviceSize calculatedSumFreeSize;
5720 ValidationContext() :
5721 calculatedAllocationCount(0),
5722 calculatedFreeCount(0),
5723 calculatedSumFreeSize(0) { }
5728 VkDeviceSize offset;
5758 VkDeviceSize m_UsableSize;
5759 uint32_t m_LevelCount;
5765 } m_FreeList[MAX_LEVELS];
5767 size_t m_AllocationCount;
5771 VkDeviceSize m_SumFreeSize;
5773 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5774 void DeleteNode(Node* node);
5775 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5776 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5777 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5779 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5780 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5784 void AddToFreeListFront(uint32_t level, Node* node);
5788 void RemoveFromFreeList(uint32_t level, Node* node);
5790 #if VMA_STATS_STRING_ENABLED 5791 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5801 class VmaDeviceMemoryBlock
5803 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5805 VmaBlockMetadata* m_pMetadata;
5809 ~VmaDeviceMemoryBlock()
5811 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5812 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5819 uint32_t newMemoryTypeIndex,
5820 VkDeviceMemory newMemory,
5821 VkDeviceSize newSize,
5823 uint32_t algorithm);
5827 VmaPool GetParentPool()
const {
return m_hParentPool; }
5828 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5829 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5830 uint32_t GetId()
const {
return m_Id; }
5831 void* GetMappedData()
const {
return m_pMappedData; }
5834 bool Validate()
const;
5839 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5842 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5843 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5845 VkResult BindBufferMemory(
5849 VkResult BindImageMemory(
5856 uint32_t m_MemoryTypeIndex;
5858 VkDeviceMemory m_hMemory;
5866 uint32_t m_MapCount;
5867 void* m_pMappedData;
5870 struct VmaPointerLess
5872 bool operator()(
const void* lhs,
const void* rhs)
const 5878 struct VmaDefragmentationMove
5880 size_t srcBlockIndex;
5881 size_t dstBlockIndex;
5882 VkDeviceSize srcOffset;
5883 VkDeviceSize dstOffset;
5887 class VmaDefragmentationAlgorithm;
5895 struct VmaBlockVector
5897 VMA_CLASS_NO_COPY(VmaBlockVector)
5902 uint32_t memoryTypeIndex,
5903 VkDeviceSize preferredBlockSize,
5904 size_t minBlockCount,
5905 size_t maxBlockCount,
5906 VkDeviceSize bufferImageGranularity,
5907 uint32_t frameInUseCount,
5909 bool explicitBlockSize,
5910 uint32_t algorithm);
5913 VkResult CreateMinBlocks();
5915 VmaPool GetParentPool()
const {
return m_hParentPool; }
5916 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5917 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5918 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5919 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5920 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5924 bool IsEmpty()
const {
return m_Blocks.empty(); }
5925 bool IsCorruptionDetectionEnabled()
const;
5928 uint32_t currentFrameIndex,
5930 VkDeviceSize alignment,
5932 VmaSuballocationType suballocType,
5933 size_t allocationCount,
5942 #if VMA_STATS_STRING_ENABLED 5943 void PrintDetailedMap(
class VmaJsonWriter& json);
5946 void MakePoolAllocationsLost(
5947 uint32_t currentFrameIndex,
5948 size_t* pLostAllocationCount);
5949 VkResult CheckCorruption();
5953 class VmaBlockVectorDefragmentationContext* pCtx,
5955 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5956 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5957 VkCommandBuffer commandBuffer);
5958 void DefragmentationEnd(
5959 class VmaBlockVectorDefragmentationContext* pCtx,
5965 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5966 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5967 size_t CalcAllocationCount()
const;
5968 bool IsBufferImageGranularityConflictPossible()
const;
5971 friend class VmaDefragmentationAlgorithm_Generic;
5975 const uint32_t m_MemoryTypeIndex;
5976 const VkDeviceSize m_PreferredBlockSize;
5977 const size_t m_MinBlockCount;
5978 const size_t m_MaxBlockCount;
5979 const VkDeviceSize m_BufferImageGranularity;
5980 const uint32_t m_FrameInUseCount;
5981 const bool m_IsCustomPool;
5982 const bool m_ExplicitBlockSize;
5983 const uint32_t m_Algorithm;
5987 bool m_HasEmptyBlock;
5988 VMA_RW_MUTEX m_Mutex;
5990 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5991 uint32_t m_NextBlockId;
5993 VkDeviceSize CalcMaxBlockSize()
const;
5996 void Remove(VmaDeviceMemoryBlock* pBlock);
6000 void IncrementallySortBlocks();
6002 VkResult AllocatePage(
6003 uint32_t currentFrameIndex,
6005 VkDeviceSize alignment,
6007 VmaSuballocationType suballocType,
6011 VkResult AllocateFromBlock(
6012 VmaDeviceMemoryBlock* pBlock,
6013 uint32_t currentFrameIndex,
6015 VkDeviceSize alignment,
6018 VmaSuballocationType suballocType,
6022 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
6025 void ApplyDefragmentationMovesCpu(
6026 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6027 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
6029 void ApplyDefragmentationMovesGpu(
6030 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6031 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6032 VkCommandBuffer commandBuffer);
6043 VMA_CLASS_NO_COPY(VmaPool_T)
6045 VmaBlockVector m_BlockVector;
6050 VkDeviceSize preferredBlockSize);
6053 uint32_t GetId()
const {
return m_Id; }
6054 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
6056 #if VMA_STATS_STRING_ENABLED 6071 class VmaDefragmentationAlgorithm
6073 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6075 VmaDefragmentationAlgorithm(
6077 VmaBlockVector* pBlockVector,
6078 uint32_t currentFrameIndex) :
6079 m_hAllocator(hAllocator),
6080 m_pBlockVector(pBlockVector),
6081 m_CurrentFrameIndex(currentFrameIndex)
6084 virtual ~VmaDefragmentationAlgorithm()
6088 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6089 virtual void AddAll() = 0;
6091 virtual VkResult Defragment(
6092 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6093 VkDeviceSize maxBytesToMove,
6094 uint32_t maxAllocationsToMove) = 0;
6096 virtual VkDeviceSize GetBytesMoved()
const = 0;
6097 virtual uint32_t GetAllocationsMoved()
const = 0;
6101 VmaBlockVector*
const m_pBlockVector;
6102 const uint32_t m_CurrentFrameIndex;
6104 struct AllocationInfo
6107 VkBool32* m_pChanged;
6110 m_hAllocation(VK_NULL_HANDLE),
6111 m_pChanged(VMA_NULL)
6115 m_hAllocation(hAlloc),
6116 m_pChanged(pChanged)
6122 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6124 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6126 VmaDefragmentationAlgorithm_Generic(
6128 VmaBlockVector* pBlockVector,
6129 uint32_t currentFrameIndex,
6130 bool overlappingMoveSupported);
6131 virtual ~VmaDefragmentationAlgorithm_Generic();
6133 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6134 virtual void AddAll() { m_AllAllocations =
true; }
6136 virtual VkResult Defragment(
6137 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6138 VkDeviceSize maxBytesToMove,
6139 uint32_t maxAllocationsToMove);
6141 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6142 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6145 uint32_t m_AllocationCount;
6146 bool m_AllAllocations;
6148 VkDeviceSize m_BytesMoved;
6149 uint32_t m_AllocationsMoved;
6151 struct AllocationInfoSizeGreater
6153 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6155 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6159 struct AllocationInfoOffsetGreater
6161 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6163 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
6169 size_t m_OriginalBlockIndex;
6170 VmaDeviceMemoryBlock* m_pBlock;
6171 bool m_HasNonMovableAllocations;
6172 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6174 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6175 m_OriginalBlockIndex(SIZE_MAX),
6177 m_HasNonMovableAllocations(true),
6178 m_Allocations(pAllocationCallbacks)
6182 void CalcHasNonMovableAllocations()
6184 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6185 const size_t defragmentAllocCount = m_Allocations.size();
6186 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6189 void SortAllocationsBySizeDescending()
6191 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6194 void SortAllocationsByOffsetDescending()
6196 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
6200 struct BlockPointerLess
6202 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6204 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6206 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6208 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
6214 struct BlockInfoCompareMoveDestination
6216 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6218 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6222 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6226 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6234 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6235 BlockInfoVector m_Blocks;
6237 VkResult DefragmentRound(
6238 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6239 VkDeviceSize maxBytesToMove,
6240 uint32_t maxAllocationsToMove);
6242 size_t CalcBlocksWithNonMovableCount()
const;
6244 static bool MoveMakesSense(
6245 size_t dstBlockIndex, VkDeviceSize dstOffset,
6246 size_t srcBlockIndex, VkDeviceSize srcOffset);
6249 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6251 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6253 VmaDefragmentationAlgorithm_Fast(
6255 VmaBlockVector* pBlockVector,
6256 uint32_t currentFrameIndex,
6257 bool overlappingMoveSupported);
6258 virtual ~VmaDefragmentationAlgorithm_Fast();
6260 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6261 virtual void AddAll() { m_AllAllocations =
true; }
6263 virtual VkResult Defragment(
6264 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6265 VkDeviceSize maxBytesToMove,
6266 uint32_t maxAllocationsToMove);
6268 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6269 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6274 size_t origBlockIndex;
6277 class FreeSpaceDatabase
6283 s.blockInfoIndex = SIZE_MAX;
6284 for(
size_t i = 0; i < MAX_COUNT; ++i)
6286 m_FreeSpaces[i] = s;
6290 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6292 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6298 size_t bestIndex = SIZE_MAX;
6299 for(
size_t i = 0; i < MAX_COUNT; ++i)
6302 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6307 if(m_FreeSpaces[i].size < size &&
6308 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6314 if(bestIndex != SIZE_MAX)
6316 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6317 m_FreeSpaces[bestIndex].offset = offset;
6318 m_FreeSpaces[bestIndex].size = size;
6322 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6323 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6325 size_t bestIndex = SIZE_MAX;
6326 VkDeviceSize bestFreeSpaceAfter = 0;
6327 for(
size_t i = 0; i < MAX_COUNT; ++i)
6330 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6332 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6334 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6336 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6338 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6341 bestFreeSpaceAfter = freeSpaceAfter;
6347 if(bestIndex != SIZE_MAX)
6349 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6350 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6352 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6355 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6356 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6357 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6362 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6372 static const size_t MAX_COUNT = 4;
6376 size_t blockInfoIndex;
6377 VkDeviceSize offset;
6379 } m_FreeSpaces[MAX_COUNT];
6382 const bool m_OverlappingMoveSupported;
6384 uint32_t m_AllocationCount;
6385 bool m_AllAllocations;
6387 VkDeviceSize m_BytesMoved;
6388 uint32_t m_AllocationsMoved;
6390 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6392 void PreprocessMetadata();
6393 void PostprocessMetadata();
6394 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
6397 struct VmaBlockDefragmentationContext
6401 BLOCK_FLAG_USED = 0x00000001,
6407 class VmaBlockVectorDefragmentationContext
6409 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6413 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6415 VmaBlockVectorDefragmentationContext(
6418 VmaBlockVector* pBlockVector,
6419 uint32_t currFrameIndex);
6420 ~VmaBlockVectorDefragmentationContext();
6422 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6423 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6424 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6426 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6427 void AddAll() { m_AllAllocations =
true; }
6429 void Begin(
bool overlappingMoveSupported);
6436 VmaBlockVector*
const m_pBlockVector;
6437 const uint32_t m_CurrFrameIndex;
6439 VmaDefragmentationAlgorithm* m_pAlgorithm;
6447 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6448 bool m_AllAllocations;
6451 struct VmaDefragmentationContext_T
6454 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6456 VmaDefragmentationContext_T(
6458 uint32_t currFrameIndex,
6461 ~VmaDefragmentationContext_T();
6463 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6464 void AddAllocations(
6465 uint32_t allocationCount,
6467 VkBool32* pAllocationsChanged);
6475 VkResult Defragment(
6476 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6477 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6482 const uint32_t m_CurrFrameIndex;
6483 const uint32_t m_Flags;
6486 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6488 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6491 #if VMA_RECORDING_ENABLED 6498 void WriteConfiguration(
6499 const VkPhysicalDeviceProperties& devProps,
6500 const VkPhysicalDeviceMemoryProperties& memProps,
6501 bool dedicatedAllocationExtensionEnabled);
6504 void RecordCreateAllocator(uint32_t frameIndex);
6505 void RecordDestroyAllocator(uint32_t frameIndex);
6506 void RecordCreatePool(uint32_t frameIndex,
6509 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6510 void RecordAllocateMemory(uint32_t frameIndex,
6511 const VkMemoryRequirements& vkMemReq,
6514 void RecordAllocateMemoryPages(uint32_t frameIndex,
6515 const VkMemoryRequirements& vkMemReq,
6517 uint64_t allocationCount,
6519 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6520 const VkMemoryRequirements& vkMemReq,
6521 bool requiresDedicatedAllocation,
6522 bool prefersDedicatedAllocation,
6525 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6526 const VkMemoryRequirements& vkMemReq,
6527 bool requiresDedicatedAllocation,
6528 bool prefersDedicatedAllocation,
6531 void RecordFreeMemory(uint32_t frameIndex,
6533 void RecordFreeMemoryPages(uint32_t frameIndex,
6534 uint64_t allocationCount,
6536 void RecordResizeAllocation(
6537 uint32_t frameIndex,
6539 VkDeviceSize newSize);
6540 void RecordSetAllocationUserData(uint32_t frameIndex,
6542 const void* pUserData);
6543 void RecordCreateLostAllocation(uint32_t frameIndex,
6545 void RecordMapMemory(uint32_t frameIndex,
6547 void RecordUnmapMemory(uint32_t frameIndex,
6549 void RecordFlushAllocation(uint32_t frameIndex,
6550 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6551 void RecordInvalidateAllocation(uint32_t frameIndex,
6552 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6553 void RecordCreateBuffer(uint32_t frameIndex,
6554 const VkBufferCreateInfo& bufCreateInfo,
6557 void RecordCreateImage(uint32_t frameIndex,
6558 const VkImageCreateInfo& imageCreateInfo,
6561 void RecordDestroyBuffer(uint32_t frameIndex,
6563 void RecordDestroyImage(uint32_t frameIndex,
6565 void RecordTouchAllocation(uint32_t frameIndex,
6567 void RecordGetAllocationInfo(uint32_t frameIndex,
6569 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6571 void RecordDefragmentationBegin(uint32_t frameIndex,
6574 void RecordDefragmentationEnd(uint32_t frameIndex,
6584 class UserDataString
6588 const char* GetString()
const {
return m_Str; }
6598 VMA_MUTEX m_FileMutex;
6600 int64_t m_StartCounter;
6602 void GetBasicParams(CallParams& outParams);
6605 template<
typename T>
6606 void PrintPointerList(uint64_t count,
const T* pItems)
6610 fprintf(m_File,
"%p", pItems[0]);
6611 for(uint64_t i = 1; i < count; ++i)
6613 fprintf(m_File,
" %p", pItems[i]);
6618 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
6622 #endif // #if VMA_RECORDING_ENABLED 6627 class VmaAllocationObjectAllocator
6629 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6631 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6638 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
6642 struct VmaAllocator_T
6644 VMA_CLASS_NO_COPY(VmaAllocator_T)
6647 bool m_UseKhrDedicatedAllocation;
6649 bool m_AllocationCallbacksSpecified;
6650 VkAllocationCallbacks m_AllocationCallbacks;
6652 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
6655 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6656 VMA_MUTEX m_HeapSizeLimitMutex;
6658 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6659 VkPhysicalDeviceMemoryProperties m_MemProps;
6662 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6665 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6666 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6667 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6673 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6675 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6679 return m_VulkanFunctions;
6682 VkDeviceSize GetBufferImageGranularity()
const 6685 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6686 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6689 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6690 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6692 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6694 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6695 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6698 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6700 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6701 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6704 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6706 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6707 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6708 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6711 bool IsIntegratedGpu()
const 6713 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6716 #if VMA_RECORDING_ENABLED 6717 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6720 void GetBufferMemoryRequirements(
6722 VkMemoryRequirements& memReq,
6723 bool& requiresDedicatedAllocation,
6724 bool& prefersDedicatedAllocation)
const;
6725 void GetImageMemoryRequirements(
6727 VkMemoryRequirements& memReq,
6728 bool& requiresDedicatedAllocation,
6729 bool& prefersDedicatedAllocation)
const;
6732 VkResult AllocateMemory(
6733 const VkMemoryRequirements& vkMemReq,
6734 bool requiresDedicatedAllocation,
6735 bool prefersDedicatedAllocation,
6736 VkBuffer dedicatedBuffer,
6737 VkImage dedicatedImage,
6739 VmaSuballocationType suballocType,
6740 size_t allocationCount,
6745 size_t allocationCount,
6748 VkResult ResizeAllocation(
6750 VkDeviceSize newSize);
6752 void CalculateStats(
VmaStats* pStats);
6754 #if VMA_STATS_STRING_ENABLED 6755 void PrintDetailedMap(
class VmaJsonWriter& json);
6758 VkResult DefragmentationBegin(
6762 VkResult DefragmentationEnd(
6769 void DestroyPool(
VmaPool pool);
6772 void SetCurrentFrameIndex(uint32_t frameIndex);
6773 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6775 void MakePoolAllocationsLost(
6777 size_t* pLostAllocationCount);
6778 VkResult CheckPoolCorruption(
VmaPool hPool);
6779 VkResult CheckCorruption(uint32_t memoryTypeBits);
6783 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6784 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6789 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6790 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6792 void FlushOrInvalidateAllocation(
6794 VkDeviceSize offset, VkDeviceSize size,
6795 VMA_CACHE_OPERATION op);
6797 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6803 uint32_t GetGpuDefragmentationMemoryTypeBits();
6806 VkDeviceSize m_PreferredLargeHeapBlockSize;
6808 VkPhysicalDevice m_PhysicalDevice;
6809 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6810 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
6812 VMA_RW_MUTEX m_PoolsMutex;
6814 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6815 uint32_t m_NextPoolId;
6819 #if VMA_RECORDING_ENABLED 6820 VmaRecorder* m_pRecorder;
6825 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6827 VkResult AllocateMemoryOfType(
6829 VkDeviceSize alignment,
6830 bool dedicatedAllocation,
6831 VkBuffer dedicatedBuffer,
6832 VkImage dedicatedImage,
6834 uint32_t memTypeIndex,
6835 VmaSuballocationType suballocType,
6836 size_t allocationCount,
6840 VkResult AllocateDedicatedMemoryPage(
6842 VmaSuballocationType suballocType,
6843 uint32_t memTypeIndex,
6844 const VkMemoryAllocateInfo& allocInfo,
6846 bool isUserDataString,
6851 VkResult AllocateDedicatedMemory(
6853 VmaSuballocationType suballocType,
6854 uint32_t memTypeIndex,
6856 bool isUserDataString,
6858 VkBuffer dedicatedBuffer,
6859 VkImage dedicatedImage,
6860 size_t allocationCount,
6869 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
6875 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6877 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6880 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6882 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
6885 template<
typename T>
6888 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6891 template<
typename T>
6892 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6894 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
6897 template<
typename T>
6898 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6903 VmaFree(hAllocator, ptr);
6907 template<
typename T>
6908 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6912 for(
size_t i = count; i--; )
6914 VmaFree(hAllocator, ptr);
6921 #if VMA_STATS_STRING_ENABLED 6923 class VmaStringBuilder
6926 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6927 size_t GetLength()
const {
return m_Data.size(); }
6928 const char* GetData()
const {
return m_Data.data(); }
6930 void Add(
char ch) { m_Data.push_back(ch); }
6931 void Add(
const char* pStr);
6932 void AddNewLine() { Add(
'\n'); }
6933 void AddNumber(uint32_t num);
6934 void AddNumber(uint64_t num);
6935 void AddPointer(
const void* ptr);
6938 VmaVector< char, VmaStlAllocator<char> > m_Data;
6941 void VmaStringBuilder::Add(
const char* pStr)
6943 const size_t strLen = strlen(pStr);
6946 const size_t oldCount = m_Data.size();
6947 m_Data.resize(oldCount + strLen);
6948 memcpy(m_Data.data() + oldCount, pStr, strLen);
6952 void VmaStringBuilder::AddNumber(uint32_t num)
6955 VmaUint32ToStr(buf,
sizeof(buf), num);
6959 void VmaStringBuilder::AddNumber(uint64_t num)
6962 VmaUint64ToStr(buf,
sizeof(buf), num);
6966 void VmaStringBuilder::AddPointer(
const void* ptr)
6969 VmaPtrToStr(buf,
sizeof(buf), ptr);
6973 #endif // #if VMA_STATS_STRING_ENABLED 6978 #if VMA_STATS_STRING_ENABLED 6982 VMA_CLASS_NO_COPY(VmaJsonWriter)
6984 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6987 void BeginObject(
bool singleLine =
false);
6990 void BeginArray(
bool singleLine =
false);
6993 void WriteString(
const char* pStr);
6994 void BeginString(
const char* pStr = VMA_NULL);
6995 void ContinueString(
const char* pStr);
6996 void ContinueString(uint32_t n);
6997 void ContinueString(uint64_t n);
6998 void ContinueString_Pointer(
const void* ptr);
6999 void EndString(
const char* pStr = VMA_NULL);
7001 void WriteNumber(uint32_t n);
7002 void WriteNumber(uint64_t n);
7003 void WriteBool(
bool b);
7007 static const char*
const INDENT;
7009 enum COLLECTION_TYPE
7011 COLLECTION_TYPE_OBJECT,
7012 COLLECTION_TYPE_ARRAY,
7016 COLLECTION_TYPE type;
7017 uint32_t valueCount;
7018 bool singleLineMode;
7021 VmaStringBuilder& m_SB;
7022 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
7023 bool m_InsideString;
7025 void BeginValue(
bool isString);
7026 void WriteIndent(
bool oneLess =
false);
7029 const char*
const VmaJsonWriter::INDENT =
" ";
7031 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
7033 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
7034 m_InsideString(false)
7038 VmaJsonWriter::~VmaJsonWriter()
7040 VMA_ASSERT(!m_InsideString);
7041 VMA_ASSERT(m_Stack.empty());
7044 void VmaJsonWriter::BeginObject(
bool singleLine)
7046 VMA_ASSERT(!m_InsideString);
7052 item.type = COLLECTION_TYPE_OBJECT;
7053 item.valueCount = 0;
7054 item.singleLineMode = singleLine;
7055 m_Stack.push_back(item);
7058 void VmaJsonWriter::EndObject()
7060 VMA_ASSERT(!m_InsideString);
7065 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
7069 void VmaJsonWriter::BeginArray(
bool singleLine)
7071 VMA_ASSERT(!m_InsideString);
7077 item.type = COLLECTION_TYPE_ARRAY;
7078 item.valueCount = 0;
7079 item.singleLineMode = singleLine;
7080 m_Stack.push_back(item);
7083 void VmaJsonWriter::EndArray()
7085 VMA_ASSERT(!m_InsideString);
7090 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
7094 void VmaJsonWriter::WriteString(
const char* pStr)
7100 void VmaJsonWriter::BeginString(
const char* pStr)
7102 VMA_ASSERT(!m_InsideString);
7106 m_InsideString =
true;
7107 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7109 ContinueString(pStr);
7113 void VmaJsonWriter::ContinueString(
const char* pStr)
7115 VMA_ASSERT(m_InsideString);
7117 const size_t strLen = strlen(pStr);
7118 for(
size_t i = 0; i < strLen; ++i)
7151 VMA_ASSERT(0 &&
"Character not currently supported.");
7157 void VmaJsonWriter::ContinueString(uint32_t n)
7159 VMA_ASSERT(m_InsideString);
7163 void VmaJsonWriter::ContinueString(uint64_t n)
7165 VMA_ASSERT(m_InsideString);
7169 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7171 VMA_ASSERT(m_InsideString);
7172 m_SB.AddPointer(ptr);
7175 void VmaJsonWriter::EndString(
const char* pStr)
7177 VMA_ASSERT(m_InsideString);
7178 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7180 ContinueString(pStr);
7183 m_InsideString =
false;
7186 void VmaJsonWriter::WriteNumber(uint32_t n)
7188 VMA_ASSERT(!m_InsideString);
7193 void VmaJsonWriter::WriteNumber(uint64_t n)
7195 VMA_ASSERT(!m_InsideString);
7200 void VmaJsonWriter::WriteBool(
bool b)
7202 VMA_ASSERT(!m_InsideString);
7204 m_SB.Add(b ?
"true" :
"false");
7207 void VmaJsonWriter::WriteNull()
7209 VMA_ASSERT(!m_InsideString);
7214 void VmaJsonWriter::BeginValue(
bool isString)
7216 if(!m_Stack.empty())
7218 StackItem& currItem = m_Stack.back();
7219 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7220 currItem.valueCount % 2 == 0)
7222 VMA_ASSERT(isString);
7225 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7226 currItem.valueCount % 2 != 0)
7230 else if(currItem.valueCount > 0)
7239 ++currItem.valueCount;
7243 void VmaJsonWriter::WriteIndent(
bool oneLess)
7245 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7249 size_t count = m_Stack.size();
7250 if(count > 0 && oneLess)
7254 for(
size_t i = 0; i < count; ++i)
7261 #endif // #if VMA_STATS_STRING_ENABLED 7265 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7267 if(IsUserDataString())
7269 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7271 FreeUserDataString(hAllocator);
7273 if(pUserData != VMA_NULL)
7275 const char*
const newStrSrc = (
char*)pUserData;
7276 const size_t newStrLen = strlen(newStrSrc);
7277 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7278 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7279 m_pUserData = newStrDst;
7284 m_pUserData = pUserData;
7288 void VmaAllocation_T::ChangeBlockAllocation(
7290 VmaDeviceMemoryBlock* block,
7291 VkDeviceSize offset)
7293 VMA_ASSERT(block != VMA_NULL);
7294 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7297 if(block != m_BlockAllocation.m_Block)
7299 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7300 if(IsPersistentMap())
7302 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7303 block->Map(hAllocator, mapRefCount, VMA_NULL);
7306 m_BlockAllocation.m_Block = block;
7307 m_BlockAllocation.m_Offset = offset;
7310 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7312 VMA_ASSERT(newSize > 0);
7316 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7318 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7319 m_BlockAllocation.m_Offset = newOffset;
7322 VkDeviceSize VmaAllocation_T::GetOffset()
const 7326 case ALLOCATION_TYPE_BLOCK:
7327 return m_BlockAllocation.m_Offset;
7328 case ALLOCATION_TYPE_DEDICATED:
7336 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7340 case ALLOCATION_TYPE_BLOCK:
7341 return m_BlockAllocation.m_Block->GetDeviceMemory();
7342 case ALLOCATION_TYPE_DEDICATED:
7343 return m_DedicatedAllocation.m_hMemory;
7346 return VK_NULL_HANDLE;
7350 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7354 case ALLOCATION_TYPE_BLOCK:
7355 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7356 case ALLOCATION_TYPE_DEDICATED:
7357 return m_DedicatedAllocation.m_MemoryTypeIndex;
7364 void* VmaAllocation_T::GetMappedData()
const 7368 case ALLOCATION_TYPE_BLOCK:
7371 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7372 VMA_ASSERT(pBlockData != VMA_NULL);
7373 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7380 case ALLOCATION_TYPE_DEDICATED:
7381 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7382 return m_DedicatedAllocation.m_pMappedData;
7389 bool VmaAllocation_T::CanBecomeLost()
const 7393 case ALLOCATION_TYPE_BLOCK:
7394 return m_BlockAllocation.m_CanBecomeLost;
7395 case ALLOCATION_TYPE_DEDICATED:
7403 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7405 VMA_ASSERT(CanBecomeLost());
7411 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7414 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7419 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7425 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7435 #if VMA_STATS_STRING_ENABLED 7438 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
7447 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7449 json.WriteString(
"Type");
7450 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7452 json.WriteString(
"Size");
7453 json.WriteNumber(m_Size);
7455 if(m_pUserData != VMA_NULL)
7457 json.WriteString(
"UserData");
7458 if(IsUserDataString())
7460 json.WriteString((
const char*)m_pUserData);
7465 json.ContinueString_Pointer(m_pUserData);
7470 json.WriteString(
"CreationFrameIndex");
7471 json.WriteNumber(m_CreationFrameIndex);
7473 json.WriteString(
"LastUseFrameIndex");
7474 json.WriteNumber(GetLastUseFrameIndex());
7476 if(m_BufferImageUsage != 0)
7478 json.WriteString(
"Usage");
7479 json.WriteNumber(m_BufferImageUsage);
7485 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7487 VMA_ASSERT(IsUserDataString());
7488 if(m_pUserData != VMA_NULL)
7490 char*
const oldStr = (
char*)m_pUserData;
7491 const size_t oldStrLen = strlen(oldStr);
7492 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7493 m_pUserData = VMA_NULL;
7497 void VmaAllocation_T::BlockAllocMap()
7499 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7501 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7507 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7511 void VmaAllocation_T::BlockAllocUnmap()
7513 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7515 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7521 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
7525 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7527 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7531 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7533 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7534 *ppData = m_DedicatedAllocation.m_pMappedData;
7540 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7541 return VK_ERROR_MEMORY_MAP_FAILED;
7546 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7547 hAllocator->m_hDevice,
7548 m_DedicatedAllocation.m_hMemory,
7553 if(result == VK_SUCCESS)
7555 m_DedicatedAllocation.m_pMappedData = *ppData;
7562 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7564 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7566 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7571 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7572 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7573 hAllocator->m_hDevice,
7574 m_DedicatedAllocation.m_hMemory);
7579 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
7583 #if VMA_STATS_STRING_ENABLED 7585 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7589 json.WriteString(
"Blocks");
7592 json.WriteString(
"Allocations");
7595 json.WriteString(
"UnusedRanges");
7598 json.WriteString(
"UsedBytes");
7601 json.WriteString(
"UnusedBytes");
7606 json.WriteString(
"AllocationSize");
7607 json.BeginObject(
true);
7608 json.WriteString(
"Min");
7610 json.WriteString(
"Avg");
7612 json.WriteString(
"Max");
7619 json.WriteString(
"UnusedRangeSize");
7620 json.BeginObject(
true);
7621 json.WriteString(
"Min");
7623 json.WriteString(
"Avg");
7625 json.WriteString(
"Max");
7633 #endif // #if VMA_STATS_STRING_ENABLED 7635 struct VmaSuballocationItemSizeLess
7638 const VmaSuballocationList::iterator lhs,
7639 const VmaSuballocationList::iterator rhs)
const 7641 return lhs->size < rhs->size;
7644 const VmaSuballocationList::iterator lhs,
7645 VkDeviceSize rhsSize)
const 7647 return lhs->size < rhsSize;
7655 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7657 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
7661 #if VMA_STATS_STRING_ENABLED 7663 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7664 VkDeviceSize unusedBytes,
7665 size_t allocationCount,
7666 size_t unusedRangeCount)
const 7670 json.WriteString(
"TotalBytes");
7671 json.WriteNumber(GetSize());
7673 json.WriteString(
"UnusedBytes");
7674 json.WriteNumber(unusedBytes);
7676 json.WriteString(
"Allocations");
7677 json.WriteNumber((uint64_t)allocationCount);
7679 json.WriteString(
"UnusedRanges");
7680 json.WriteNumber((uint64_t)unusedRangeCount);
7682 json.WriteString(
"Suballocations");
7686 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7687 VkDeviceSize offset,
7690 json.BeginObject(
true);
7692 json.WriteString(
"Offset");
7693 json.WriteNumber(offset);
7695 hAllocation->PrintParameters(json);
7700 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7701 VkDeviceSize offset,
7702 VkDeviceSize size)
const 7704 json.BeginObject(
true);
7706 json.WriteString(
"Offset");
7707 json.WriteNumber(offset);
7709 json.WriteString(
"Type");
7710 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7712 json.WriteString(
"Size");
7713 json.WriteNumber(size);
7718 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7724 #endif // #if VMA_STATS_STRING_ENABLED 7729 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7730 VmaBlockMetadata(hAllocator),
7733 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7734 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7738 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
7742 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7744 VmaBlockMetadata::Init(size);
7747 m_SumFreeSize = size;
7749 VmaSuballocation suballoc = {};
7750 suballoc.offset = 0;
7751 suballoc.size = size;
7752 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7753 suballoc.hAllocation = VK_NULL_HANDLE;
7755 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7756 m_Suballocations.push_back(suballoc);
7757 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7759 m_FreeSuballocationsBySize.push_back(suballocItem);
7762 bool VmaBlockMetadata_Generic::Validate()
const 7764 VMA_VALIDATE(!m_Suballocations.empty());
7767 VkDeviceSize calculatedOffset = 0;
7769 uint32_t calculatedFreeCount = 0;
7771 VkDeviceSize calculatedSumFreeSize = 0;
7774 size_t freeSuballocationsToRegister = 0;
7776 bool prevFree =
false;
7778 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7779 suballocItem != m_Suballocations.cend();
7782 const VmaSuballocation& subAlloc = *suballocItem;
7785 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7787 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
7789 VMA_VALIDATE(!prevFree || !currFree);
7791 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7795 calculatedSumFreeSize += subAlloc.size;
7796 ++calculatedFreeCount;
7797 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7799 ++freeSuballocationsToRegister;
7803 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7807 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7808 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
7811 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7814 calculatedOffset += subAlloc.size;
7815 prevFree = currFree;
7820 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7822 VkDeviceSize lastSize = 0;
7823 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7825 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7828 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7830 VMA_VALIDATE(suballocItem->size >= lastSize);
7832 lastSize = suballocItem->size;
7836 VMA_VALIDATE(ValidateFreeSuballocationList());
7837 VMA_VALIDATE(calculatedOffset == GetSize());
7838 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7839 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
7844 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7846 if(!m_FreeSuballocationsBySize.empty())
7848 return m_FreeSuballocationsBySize.back()->size;
7856 bool VmaBlockMetadata_Generic::IsEmpty()
const 7858 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
7861 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7865 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7877 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7878 suballocItem != m_Suballocations.cend();
7881 const VmaSuballocation& suballoc = *suballocItem;
7882 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7895 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7897 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7899 inoutStats.
size += GetSize();
7906 #if VMA_STATS_STRING_ENABLED 7908 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7910 PrintDetailedMap_Begin(json,
7912 m_Suballocations.size() - (size_t)m_FreeCount,
7916 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7917 suballocItem != m_Suballocations.cend();
7918 ++suballocItem, ++i)
7920 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7922 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7926 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7930 PrintDetailedMap_End(json);
// Searches this block for a place to put a new allocation of allocSize /
// allocAlignment / allocType. On success fills *pAllocationRequest with the
// chosen item, offset and (when canMakeOtherLost) the cost of making other
// allocations lost. Three visible search strategies: best-fit binary search
// over m_FreeSuballocationsBySize, a MIN_OFFSET linear scan, and a
// worst-fit reverse scan; a final pass considers making allocations lost.
// NOTE(review): extraction-damaged — original line numbers fused into the
// text; parameter lines for `upperAddress` and `strategy` (asserted/used
// below), braces, `return true/false` statements, and several CheckAllocation
// argument lines are missing. Do not compile without recovering them.
7933 #endif // #if VMA_STATS_STRING_ENABLED 7935 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7936 uint32_t currentFrameIndex,
7937 uint32_t frameInUseCount,
7938 VkDeviceSize bufferImageGranularity,
7939 VkDeviceSize allocSize,
7940 VkDeviceSize allocAlignment,
7942 VmaSuballocationType allocType,
7943 bool canMakeOtherLost,
7945 VmaAllocationRequest* pAllocationRequest)
7947 VMA_ASSERT(allocSize > 0);
// `upperAddress` is asserted here although its parameter line is lost above.
7948 VMA_ASSERT(!upperAddress);
7949 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7950 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7951 VMA_HEAVY_ASSERT(Validate());
7953 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Early out: without the option to make others lost, there must be enough
// total free space (plus debug margins on both sides).
7956 if(canMakeOtherLost ==
false &&
7957 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7963 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7964 if(freeSuballocCount > 0)
// Best-fit: binary search for the first free suballocation large enough.
7969 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7970 m_FreeSuballocationsBySize.data(),
7971 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7972 allocSize + 2 * VMA_DEBUG_MARGIN,
7973 VmaSuballocationItemSizeLess());
7974 size_t index = it - m_FreeSuballocationsBySize.data();
7975 for(; index < freeSuballocCount; ++index)
7980 bufferImageGranularity,
7984 m_FreeSuballocationsBySize[index],
7986 &pAllocationRequest->offset,
7987 &pAllocationRequest->itemsToMakeLostCount,
7988 &pAllocationRequest->sumFreeSize,
7989 &pAllocationRequest->sumItemSize))
7991 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// MIN_OFFSET strategy: take the first suitable free item by address order.
7996 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7998 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7999 it != m_Suballocations.end();
8002 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
8005 bufferImageGranularity,
8011 &pAllocationRequest->offset,
8012 &pAllocationRequest->itemsToMakeLostCount,
8013 &pAllocationRequest->sumFreeSize,
8014 &pAllocationRequest->sumItemSize))
8016 pAllocationRequest->item = it;
// Worst-fit fallback: iterate the size-sorted vector from largest down.
8024 for(
size_t index = freeSuballocCount; index--; )
8029 bufferImageGranularity,
8033 m_FreeSuballocationsBySize[index],
8035 &pAllocationRequest->offset,
8036 &pAllocationRequest->itemsToMakeLostCount,
8037 &pAllocationRequest->sumFreeSize,
8038 &pAllocationRequest->sumItemSize))
8040 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Last resort: consider every item that is free or can become lost, and
// keep the candidate with the lowest CalcCost().
8047 if(canMakeOtherLost)
8052 VmaAllocationRequest tmpAllocRequest = {};
8053 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8054 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8055 suballocIt != m_Suballocations.end();
8058 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8059 suballocIt->hAllocation->CanBecomeLost())
8064 bufferImageGranularity,
8070 &tmpAllocRequest.offset,
8071 &tmpAllocRequest.itemsToMakeLostCount,
8072 &tmpAllocRequest.sumFreeSize,
8073 &tmpAllocRequest.sumItemSize))
8077 *pAllocationRequest = tmpAllocRequest;
8078 pAllocationRequest->item = suballocIt;
8081 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8083 *pAllocationRequest = tmpAllocRequest;
8084 pAllocationRequest->item = suballocIt;
8097 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8098 uint32_t currentFrameIndex,
8099 uint32_t frameInUseCount,
8100 VmaAllocationRequest* pAllocationRequest)
8102 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8104 while(pAllocationRequest->itemsToMakeLostCount > 0)
8106 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8108 ++pAllocationRequest->item;
8110 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8111 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8112 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8113 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8115 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8116 --pAllocationRequest->itemsToMakeLostCount;
8124 VMA_HEAVY_ASSERT(Validate());
8125 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8126 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
8131 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8133 uint32_t lostAllocationCount = 0;
8134 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8135 it != m_Suballocations.end();
8138 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8139 it->hAllocation->CanBecomeLost() &&
8140 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8142 it = FreeSuballocation(it);
8143 ++lostAllocationCount;
8146 return lostAllocationCount;
8149 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8151 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8152 it != m_Suballocations.end();
8155 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
8157 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8159 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8160 return VK_ERROR_VALIDATION_FAILED_EXT;
8162 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8164 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8165 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: shrinks the chosen free
// suballocation to exactly [request.offset, request.offset + allocSize),
// marks it used, and re-inserts any leading/trailing padding as new free
// suballocations.
// NOTE(review): extraction-damaged — original line numbers fused into the
// text; the `hAllocation` parameter line, braces, `++next`, and the free
// count / free size bookkeeping around lines 8228-8234 are missing. Does
// not compile as-is.
8173 void VmaBlockMetadata_Generic::Alloc(
8174 const VmaAllocationRequest& request,
8175 VmaSuballocationType type,
8176 VkDeviceSize allocSize,
8179 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8180 VMA_ASSERT(request.item != m_Suballocations.end());
8181 VmaSuballocation& suballoc = *request.item;
8183 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8185 VMA_ASSERT(request.offset >= suballoc.offset);
// Split the free item into padding-before / allocation / padding-after.
8186 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8187 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8188 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the size-sorted free list before mutating the item's size.
8192 UnregisterFreeSuballocation(request.item);
8194 suballoc.offset = request.offset;
8195 suballoc.size = allocSize;
8196 suballoc.type = type;
8197 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation after the item.
8202 VmaSuballocation paddingSuballoc = {};
8203 paddingSuballoc.offset = request.offset + allocSize;
8204 paddingSuballoc.size = paddingEnd;
8205 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8206 VmaSuballocationList::iterator next = request.item;
8208 const VmaSuballocationList::iterator paddingEndItem =
8209 m_Suballocations.insert(next, paddingSuballoc);
8210 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation before the item.
8216 VmaSuballocation paddingSuballoc = {};
8217 paddingSuballoc.offset = request.offset - paddingBegin;
8218 paddingSuballoc.size = paddingBegin;
8219 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8220 const VmaSuballocationList::iterator paddingBeginItem =
8221 m_Suballocations.insert(request.item, paddingSuballoc);
8222 RegisterFreeSuballocation(paddingBeginItem);
// One free item consumed; the padding increments are in the lost lines.
8226 m_FreeCount = m_FreeCount - 1;
8227 if(paddingBegin > 0)
8235 m_SumFreeSize -= allocSize;
8238 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8240 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8241 suballocItem != m_Suballocations.end();
8244 VmaSuballocation& suballoc = *suballocItem;
8245 if(suballoc.hAllocation == allocation)
8247 FreeSuballocation(suballocItem);
8248 VMA_HEAVY_ASSERT(Validate());
8252 VMA_ASSERT(0 &&
"Not found!");
8255 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8257 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8258 suballocItem != m_Suballocations.end();
8261 VmaSuballocation& suballoc = *suballocItem;
8262 if(suballoc.offset == offset)
8264 FreeSuballocation(suballocItem);
8268 VMA_ASSERT(0 &&
"Not found!");
// Tries to grow or shrink the suballocation that holds `alloc` in place to
// `newSize`, absorbing or releasing space from the adjacent free
// suballocation (or creating/erasing one). Returns false when growing is
// impossible.
// NOTE(review): extraction-damaged — original line numbers fused into the
// text; braces, `++nextItem`, `return true/false` statements and several
// else branches are missing. Does not compile as-is.
8271 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8273 typedef VmaSuballocationList::iterator iter_type;
8274 for(iter_type suballocItem = m_Suballocations.begin();
8275 suballocItem != m_Suballocations.end();
8278 VmaSuballocation& suballoc = *suballocItem;
8279 if(suballoc.hAllocation == alloc)
8281 iter_type nextItem = suballocItem;
8285 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// --- Shrinking path: released space goes to the next free item or a new one.
8288 if(newSize < alloc->GetSize())
8290 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8293 if(nextItem != m_Suballocations.end())
8296 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Grow the following free item leftwards by sizeDiff.
8299 UnregisterFreeSuballocation(nextItem);
8300 nextItem->offset -= sizeDiff;
8301 nextItem->size += sizeDiff;
8302 RegisterFreeSuballocation(nextItem);
// Next item is used: insert a brand-new free item in the gap.
8308 VmaSuballocation newFreeSuballoc;
8309 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8310 newFreeSuballoc.offset = suballoc.offset + newSize;
8311 newFreeSuballoc.size = sizeDiff;
8312 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8313 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8314 RegisterFreeSuballocation(newFreeSuballocIt);
// This allocation was the last item: append the new free item at the end.
8323 VmaSuballocation newFreeSuballoc;
8324 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8325 newFreeSuballoc.offset = suballoc.offset + newSize;
8326 newFreeSuballoc.size = sizeDiff;
8327 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8328 m_Suballocations.push_back(newFreeSuballoc);
8330 iter_type newFreeSuballocIt = m_Suballocations.end();
8331 RegisterFreeSuballocation(--newFreeSuballocIt);
8336 suballoc.size = newSize;
8337 m_SumFreeSize += sizeDiff;
// --- Growing path: consume space from the next free item, if big enough.
8342 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8345 if(nextItem != m_Suballocations.end())
8348 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Not enough room in the adjacent free item (margin included) -> fail.
8351 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// Shrink the following free item from the left...
8357 if(nextItem->size > sizeDiff)
8360 UnregisterFreeSuballocation(nextItem);
8361 nextItem->offset += sizeDiff;
8362 nextItem->size -= sizeDiff;
8363 RegisterFreeSuballocation(nextItem);
// ...or consume it entirely.
8369 UnregisterFreeSuballocation(nextItem);
8370 m_Suballocations.erase(nextItem);
8386 suballoc.size = newSize;
8387 m_SumFreeSize -= sizeDiff;
8394 VMA_ASSERT(0 &&
"Not found!");
8398 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8400 VkDeviceSize lastSize = 0;
8401 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8403 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8405 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8406 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8407 VMA_VALIDATE(it->size >= lastSize);
8408 lastSize = it->size;
// Core placement test: decides whether an allocation of allocSize /
// allocAlignment / allocType can start inside the suballocation at
// suballocItem, honoring VMA_DEBUG_MARGIN and bufferImageGranularity.
// Outputs the aligned *pOffset and, in the canMakeOtherLost branch, the
// number of allocations to make lost plus the free/used byte sums used for
// cost comparison. The function has two largely parallel halves: the first
// for canMakeOtherLost == true (may span multiple items), the second for a
// single already-free item.
// NOTE(review): extraction-damaged — original line numbers fused into the
// text; braces, `--prevSuballocItem` / `++lastSuballocItem` /
// `++nextSuballocItem` iterator steps, several `return false`/`return true`
// statements and else branches are missing. Does not compile as-is.
8413 bool VmaBlockMetadata_Generic::CheckAllocation(
8414 uint32_t currentFrameIndex,
8415 uint32_t frameInUseCount,
8416 VkDeviceSize bufferImageGranularity,
8417 VkDeviceSize allocSize,
8418 VkDeviceSize allocAlignment,
8419 VmaSuballocationType allocType,
8420 VmaSuballocationList::const_iterator suballocItem,
8421 bool canMakeOtherLost,
8422 VkDeviceSize* pOffset,
8423 size_t* itemsToMakeLostCount,
8424 VkDeviceSize* pSumFreeSize,
8425 VkDeviceSize* pSumItemSize)
const 8427 VMA_ASSERT(allocSize > 0);
8428 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8429 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8430 VMA_ASSERT(pOffset != VMA_NULL);
8432 *itemsToMakeLostCount = 0;
// ---- Branch 1: the allocation may displace lost-able allocations. ----
8436 if(canMakeOtherLost)
8438 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8440 *pSumFreeSize = suballocItem->size;
// A used item only qualifies if it can become lost and is out of its
// frame-in-use protection window.
8444 if(suballocItem->hAllocation->CanBecomeLost() &&
8445 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8447 ++*itemsToMakeLostCount;
8448 *pSumItemSize = suballocItem->size;
8457 if(GetSize() - suballocItem->offset < allocSize)
// Start at the item's offset, then apply debug margin and alignment.
8463 *pOffset = suballocItem->offset;
8466 if(VMA_DEBUG_MARGIN > 0)
8468 *pOffset += VMA_DEBUG_MARGIN;
8472 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Check previous items on the same "page" for granularity conflicts; if
// any, bump the offset up to the granularity boundary.
8476 if(bufferImageGranularity > 1)
8478 bool bufferImageGranularityConflict =
false;
8479 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8480 while(prevSuballocItem != m_Suballocations.cbegin())
8483 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8484 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8486 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8488 bufferImageGranularityConflict =
true;
8496 if(bufferImageGranularityConflict)
8498 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8504 if(*pOffset >= suballocItem->offset + suballocItem->size)
8510 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8513 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8515 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8517 if(suballocItem->offset + totalSize > GetSize())
// Walk forward across as many items as needed to cover totalSize,
// accumulating free bytes and lost-able allocation bytes.
8524 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8525 if(totalSize > suballocItem->size)
8527 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8528 while(remainingSize > 0)
8531 if(lastSuballocItem == m_Suballocations.cend())
8535 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8537 *pSumFreeSize += lastSuballocItem->size;
8541 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8542 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8543 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8545 ++*itemsToMakeLostCount;
8546 *pSumItemSize += lastSuballocItem->size;
8553 remainingSize = (lastSuballocItem->size < remainingSize) ?
8554 remainingSize - lastSuballocItem->size : 0;
// Following items on the same page that would conflict must also be
// lost-able, and count toward itemsToMakeLostCount.
8560 if(bufferImageGranularity > 1)
8562 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8564 while(nextSuballocItem != m_Suballocations.cend())
8566 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8567 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8569 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8571 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8572 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8573 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8575 ++*itemsToMakeLostCount;
// ---- Branch 2: simple case, the item itself must be free and big enough. ----
8594 const VmaSuballocation& suballoc = *suballocItem;
8595 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8597 *pSumFreeSize = suballoc.size;
8600 if(suballoc.size < allocSize)
8606 *pOffset = suballoc.offset;
8609 if(VMA_DEBUG_MARGIN > 0)
8611 *pOffset += VMA_DEBUG_MARGIN;
8615 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity-conflict scan as in branch 1.
8619 if(bufferImageGranularity > 1)
8621 bool bufferImageGranularityConflict =
false;
8622 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8623 while(prevSuballocItem != m_Suballocations.cbegin())
8626 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8627 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8629 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8631 bufferImageGranularityConflict =
true;
8639 if(bufferImageGranularityConflict)
8641 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8646 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8649 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// After alignment the allocation plus margins must still fit in the item.
8652 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward scan: a conflicting next allocation on the same page fails the
// placement in this branch (nothing may be made lost here).
8659 if(bufferImageGranularity > 1)
8661 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8663 while(nextSuballocItem != m_Suballocations.cend())
8665 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8666 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8668 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8687 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8689 VMA_ASSERT(item != m_Suballocations.end());
8690 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8692 VmaSuballocationList::iterator nextItem = item;
8694 VMA_ASSERT(nextItem != m_Suballocations.end());
8695 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8697 item->size += nextItem->size;
8699 m_Suballocations.erase(nextItem);
8702 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8705 VmaSuballocation& suballoc = *suballocItem;
8706 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8707 suballoc.hAllocation = VK_NULL_HANDLE;
8711 m_SumFreeSize += suballoc.size;
8714 bool mergeWithNext =
false;
8715 bool mergeWithPrev =
false;
8717 VmaSuballocationList::iterator nextItem = suballocItem;
8719 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8721 mergeWithNext =
true;
8724 VmaSuballocationList::iterator prevItem = suballocItem;
8725 if(suballocItem != m_Suballocations.begin())
8728 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8730 mergeWithPrev =
true;
8736 UnregisterFreeSuballocation(nextItem);
8737 MergeFreeWithNext(suballocItem);
8742 UnregisterFreeSuballocation(prevItem);
8743 MergeFreeWithNext(prevItem);
8744 RegisterFreeSuballocation(prevItem);
8749 RegisterFreeSuballocation(suballocItem);
8750 return suballocItem;
8754 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8756 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8757 VMA_ASSERT(item->size > 0);
8761 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8763 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8765 if(m_FreeSuballocationsBySize.empty())
8767 m_FreeSuballocationsBySize.push_back(item);
8771 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
8779 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8781 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8782 VMA_ASSERT(item->size > 0);
8786 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8788 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8790 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8791 m_FreeSuballocationsBySize.data(),
8792 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8794 VmaSuballocationItemSizeLess());
8795 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8796 index < m_FreeSuballocationsBySize.size();
8799 if(m_FreeSuballocationsBySize[index] == item)
8801 VmaVectorRemove(m_FreeSuballocationsBySize, index);
8804 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8806 VMA_ASSERT(0 &&
"Not found.");
8812 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8813 VkDeviceSize bufferImageGranularity,
8814 VmaSuballocationType& inOutPrevSuballocType)
const 8816 if(bufferImageGranularity == 1 || IsEmpty())
8821 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8822 bool typeConflictFound =
false;
8823 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8824 it != m_Suballocations.cend();
8827 const VmaSuballocationType suballocType = it->type;
8828 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8830 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8831 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8833 typeConflictFound =
true;
8835 inOutPrevSuballocType = suballocType;
8839 return typeConflictFound || minAlignment >= bufferImageGranularity;
8845 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8846 VmaBlockMetadata(hAllocator),
8848 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8849 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8850 m_1stVectorIndex(0),
8851 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8852 m_1stNullItemsBeginCount(0),
8853 m_1stNullItemsMiddleCount(0),
8854 m_2ndNullItemsCount(0)
8858 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
8862 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8864 VmaBlockMetadata::Init(size);
8865 m_SumFreeSize = size;
// Consistency check for the linear allocator's double-vector layout:
// validates vector emptiness vs m_2ndVectorMode, null-item counters, that
// offsets are strictly increasing with debug margins, that each used
// suballocation agrees with its VmaAllocation, and that m_SumFreeSize
// matches size minus used bytes. Uses VMA_VALIDATE (returns false on first
// failure).
// NOTE(review): extraction-damaged — original line numbers fused into the
// text; braces, the ring-buffer 2nd-before-1st ordering checks, null-item
// increments (around lines 8916-8920 etc.) and the final `return true` are
// missing. Does not compile as-is.
8868 bool VmaBlockMetadata_Linear::Validate()
const 8870 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8871 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8873 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8874 VMA_VALIDATE(!suballocations1st.empty() ||
8875 suballocations2nd.empty() ||
8876 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8878 if(!suballocations1st.empty())
// First non-null item of 1st vector must be a real allocation, and the
// last item must not be null either.
8881 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8883 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8885 if(!suballocations2nd.empty())
8888 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8891 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8892 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8894 VkDeviceSize sumUsedSize = 0;
8895 const size_t suballoc1stCount = suballocations1st.size();
8896 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the space before the 1st vector.
8898 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8900 const size_t suballoc2ndCount = suballocations2nd.size();
8901 size_t nullItem2ndCount = 0;
8902 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8904 const VmaSuballocation& suballoc = suballocations2nd[i];
8905 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8907 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8908 VMA_VALIDATE(suballoc.offset >= offset);
8912 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8913 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8914 sumUsedSize += suballoc.size;
8921 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8924 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be genuinely empty.
8927 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8929 const VmaSuballocation& suballoc = suballocations1st[i];
8930 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8931 suballoc.hAllocation == VK_NULL_HANDLE);
8934 size_t nullItem1stCount = m_1stNullItemsBeginCount;
8936 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8938 const VmaSuballocation& suballoc = suballocations1st[i];
8939 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8941 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8942 VMA_VALIDATE(suballoc.offset >= offset);
8943 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8947 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8948 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8949 sumUsedSize += suballoc.size;
8956 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8958 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward from the end of the block,
// so iterate it in reverse to keep offsets increasing.
8960 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8962 const size_t suballoc2ndCount = suballocations2nd.size();
8963 size_t nullItem2ndCount = 0;
8964 for(
size_t i = suballoc2ndCount; i--; )
8966 const VmaSuballocation& suballoc = suballocations2nd[i];
8967 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8969 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8970 VMA_VALIDATE(suballoc.offset >= offset);
8974 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8975 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8976 sumUsedSize += suballoc.size;
8983 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8986 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
8989 VMA_VALIDATE(offset <= GetSize());
8990 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
8995 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8997 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8998 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
9001 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 9003 const VkDeviceSize size = GetSize();
9015 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9017 switch(m_2ndVectorMode)
9019 case SECOND_VECTOR_EMPTY:
9025 const size_t suballocations1stCount = suballocations1st.size();
9026 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
9027 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9028 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
9030 firstSuballoc.offset,
9031 size - (lastSuballoc.offset + lastSuballoc.size));
9035 case SECOND_VECTOR_RING_BUFFER:
9040 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9041 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
9042 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9043 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
9047 case SECOND_VECTOR_DOUBLE_STACK:
9052 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9053 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9054 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9055 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Builds a VmaStatInfo for this linear block by sweeping the address space
// in order: 2nd vector (ring-buffer mode, before 1st), then 1st vector, then
// 2nd vector in reverse (double-stack mode, at the end). Each sweep skips
// null items, accounts gaps as unused ranges and items as allocations.
// NOTE(review): extraction-damaged — original line numbers fused into the
// text; the outInfo field updates, braces, `break` statements and else
// branches are missing. Does not compile as-is.
9065 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9067 const VkDeviceSize size = GetSize();
9068 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9069 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9070 const size_t suballoc1stCount = suballocations1st.size();
9071 const size_t suballoc2ndCount = suballocations2nd.size();
9082 VkDeviceSize lastOffset = 0;
// Sweep 1: ring-buffer 2nd vector, which ends where the 1st vector begins.
9084 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9086 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9087 size_t nextAlloc2ndIndex = 0;
9088 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
9091 while(nextAlloc2ndIndex < suballoc2ndCount &&
9092 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9094 ++nextAlloc2ndIndex;
9098 if(nextAlloc2ndIndex < suballoc2ndCount)
9100 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9103 if(lastOffset < suballoc.offset)
9106 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9120 lastOffset = suballoc.offset + suballoc.size;
9121 ++nextAlloc2ndIndex;
9127 if(lastOffset < freeSpace2ndTo1stEnd)
9129 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9137 lastOffset = freeSpace2ndTo1stEnd;
// Sweep 2: 1st vector, up to the top of the 2nd stack or the block end.
9142 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9143 const VkDeviceSize freeSpace1stTo2ndEnd =
9144 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9145 while(lastOffset < freeSpace1stTo2ndEnd)
9148 while(nextAlloc1stIndex < suballoc1stCount &&
9149 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9151 ++nextAlloc1stIndex;
9155 if(nextAlloc1stIndex < suballoc1stCount)
9157 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9160 if(lastOffset < suballoc.offset)
9163 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9177 lastOffset = suballoc.offset + suballoc.size;
9178 ++nextAlloc1stIndex;
9184 if(lastOffset < freeSpace1stTo2ndEnd)
9186 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9194 lastOffset = freeSpace1stTo2ndEnd;
// Sweep 3: double-stack 2nd vector, iterated in reverse (grows downward
// from the end of the block), until the block end is reached.
9198 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9200 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9201 while(lastOffset < size)
9204 while(nextAlloc2ndIndex != SIZE_MAX &&
9205 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9207 --nextAlloc2ndIndex;
9211 if(nextAlloc2ndIndex != SIZE_MAX)
9213 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9216 if(lastOffset < suballoc.offset)
9219 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9233 lastOffset = suballoc.offset + suballoc.size;
9234 --nextAlloc2ndIndex;
9240 if(lastOffset < size)
9242 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this linear block's metrics into a pool-wide VmaPoolStats,
// using the same three address-ordered sweeps as CalcAllocationStatInfo
// (ring-buffer 2nd vector, 1st vector, reversed double-stack 2nd vector).
// NOTE(review): extraction-damaged — original line numbers fused into the
// text; the inoutStats field accumulations other than `size`, braces and
// else branches are missing. Also note the visible initialization
// `nextAlloc2ndIndex = m_1stNullItemsBeginCount` (line 9273) indexes the
// 2nd vector with a 1st-vector counter — verify against upstream (other
// sweeps in this file start the 2nd index at 0) before relying on it.
9258 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9260 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9261 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9262 const VkDeviceSize size = GetSize();
9263 const size_t suballoc1stCount = suballocations1st.size();
9264 const size_t suballoc2ndCount = suballocations2nd.size();
9266 inoutStats.
size += size;
9268 VkDeviceSize lastOffset = 0;
// Sweep 1: ring-buffer 2nd vector, before the start of the 1st vector.
9270 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9272 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9273 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9274 while(lastOffset < freeSpace2ndTo1stEnd)
9277 while(nextAlloc2ndIndex < suballoc2ndCount &&
9278 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9280 ++nextAlloc2ndIndex;
9284 if(nextAlloc2ndIndex < suballoc2ndCount)
9286 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation is an unused range.
9289 if(lastOffset < suballoc.offset)
9292 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9303 lastOffset = suballoc.offset + suballoc.size;
9304 ++nextAlloc2ndIndex;
9309 if(lastOffset < freeSpace2ndTo1stEnd)
9312 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9319 lastOffset = freeSpace2ndTo1stEnd;
// Sweep 2: the 1st vector, up to the 2nd stack top or the block end.
9324 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9325 const VkDeviceSize freeSpace1stTo2ndEnd =
9326 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9327 while(lastOffset < freeSpace1stTo2ndEnd)
9330 while(nextAlloc1stIndex < suballoc1stCount &&
9331 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9333 ++nextAlloc1stIndex;
9337 if(nextAlloc1stIndex < suballoc1stCount)
9339 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9342 if(lastOffset < suballoc.offset)
9345 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9356 lastOffset = suballoc.offset + suballoc.size;
9357 ++nextAlloc1stIndex;
9362 if(lastOffset < freeSpace1stTo2ndEnd)
9365 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9372 lastOffset = freeSpace1stTo2ndEnd;
// Sweep 3: double-stack 2nd vector, iterated in reverse address order.
9376 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9378 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9379 while(lastOffset < size)
9382 while(nextAlloc2ndIndex != SIZE_MAX &&
9383 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9385 --nextAlloc2ndIndex;
9389 if(nextAlloc2ndIndex != SIZE_MAX)
9391 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9394 if(lastOffset < suballoc.offset)
9397 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9408 lastOffset = suballoc.offset + suballoc.size;
9409 --nextAlloc2ndIndex;
9414 if(lastOffset < size)
9417 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Writes a detailed JSON dump of this linear (ring-buffer / double-stack)
// block. Two passes over the same ranges:
//   pass 1 counts allocations, unused ranges and used bytes so that
//          PrintDetailedMap_Begin() can be called with the totals;
//   pass 2 re-walks the ranges and emits each allocation / unused range.
// Each pass scans, in address order: the 2nd vector when used as a ring
// buffer (allocations below the start of the 1st vector), then the 1st
// vector, then the 2nd vector when used as a double stack (scanned from the
// back, i.e. from the lowest offset of the upper stack).
// NOTE(review): the embedded original line numbers jump, so some statements
// (braces, counter increments) are not visible in this extraction; the code
// below is kept byte-identical.
9430 #if VMA_STATS_STRING_ENABLED 9431 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9433 const VkDeviceSize size = GetSize();
9434 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9435 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9436 const size_t suballoc1stCount = suballocations1st.size();
9437 const size_t suballoc2ndCount = suballocations2nd.size();
// --- Pass 1: count allocations / unused ranges and sum used bytes. ---
9441 size_t unusedRangeCount = 0;
9442 VkDeviceSize usedBytes = 0;
9444 VkDeviceSize lastOffset = 0;
9446 size_t alloc2ndCount = 0;
// 2nd vector used as ring buffer: its allocations occupy [0, start of 1st).
9447 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9449 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9450 size_t nextAlloc2ndIndex = 0;
9451 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed-but-not-compacted) items.
9454 while(nextAlloc2ndIndex < suballoc2ndCount &&
9455 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9457 ++nextAlloc2ndIndex;
9461 if(nextAlloc2ndIndex < suballoc2ndCount)
9463 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9466 if(lastOffset < suballoc.offset)
9475 usedBytes += suballoc.size;
9478 lastOffset = suballoc.offset + suballoc.size;
9479 ++nextAlloc2ndIndex;
// Trailing gap up to the start of the 1st vector.
9484 if(lastOffset < freeSpace2ndTo1stEnd)
9491 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector, from the first non-null item.
9496 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9497 size_t alloc1stCount = 0;
// Free space after the 1st vector ends at the upper stack (double stack
// mode) or at the end of the block.
9498 const VkDeviceSize freeSpace1stTo2ndEnd =
9499 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9500 while(lastOffset < freeSpace1stTo2ndEnd)
9503 while(nextAlloc1stIndex < suballoc1stCount &&
9504 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9506 ++nextAlloc1stIndex;
9510 if(nextAlloc1stIndex < suballoc1stCount)
9512 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9515 if(lastOffset < suballoc.offset)
9524 usedBytes += suballoc.size;
9527 lastOffset = suballoc.offset + suballoc.size;
9528 ++nextAlloc1stIndex;
9533 if(lastOffset < size)
9540 lastOffset = freeSpace1stTo2ndEnd;
// 2nd vector used as double stack: iterate from back (lowest offset).
9544 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9546 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9547 while(lastOffset < size)
// Index wraps to SIZE_MAX when the vector is exhausted.
9550 while(nextAlloc2ndIndex != SIZE_MAX &&
9551 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9553 --nextAlloc2ndIndex;
9557 if(nextAlloc2ndIndex != SIZE_MAX)
9559 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9562 if(lastOffset < suballoc.offset)
9571 usedBytes += suballoc.size;
9574 lastOffset = suballoc.offset + suballoc.size;
9575 --nextAlloc2ndIndex;
9580 if(lastOffset < size)
// --- Emit header with totals, then pass 2: write each range as JSON. ---
9592 const VkDeviceSize unusedBytes = size - usedBytes;
9593 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// Pass 2 repeats the same three scans, now emitting JSON entries.
9598 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9600 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9601 size_t nextAlloc2ndIndex = 0;
9602 while(lastOffset < freeSpace2ndTo1stEnd)
9605 while(nextAlloc2ndIndex < suballoc2ndCount &&
9606 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9608 ++nextAlloc2ndIndex;
9612 if(nextAlloc2ndIndex < suballoc2ndCount)
9614 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9617 if(lastOffset < suballoc.offset)
9620 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9621 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9626 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9629 lastOffset = suballoc.offset + suballoc.size;
9630 ++nextAlloc2ndIndex;
9635 if(lastOffset < freeSpace2ndTo1stEnd)
9638 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9639 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9643 lastOffset = freeSpace2ndTo1stEnd;
9648 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9649 while(lastOffset < freeSpace1stTo2ndEnd)
9652 while(nextAlloc1stIndex < suballoc1stCount &&
9653 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9655 ++nextAlloc1stIndex;
9659 if(nextAlloc1stIndex < suballoc1stCount)
9661 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9664 if(lastOffset < suballoc.offset)
9667 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9668 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9673 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9676 lastOffset = suballoc.offset + suballoc.size;
9677 ++nextAlloc1stIndex;
9682 if(lastOffset < freeSpace1stTo2ndEnd)
9685 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9686 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9690 lastOffset = freeSpace1stTo2ndEnd;
9694 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9696 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9697 while(lastOffset < size)
9700 while(nextAlloc2ndIndex != SIZE_MAX &&
9701 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9703 --nextAlloc2ndIndex;
9707 if(nextAlloc2ndIndex != SIZE_MAX)
9709 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9712 if(lastOffset < suballoc.offset)
9715 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9716 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9721 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9724 lastOffset = suballoc.offset + suballoc.size;
9725 --nextAlloc2ndIndex;
9730 if(lastOffset < size)
9733 const VkDeviceSize unusedRangeSize = size - lastOffset;
9734 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9743 PrintDetailedMap_End(json);
// Entry point for allocation-request creation in the linear allocator.
// Validates the request, then dispatches on `upperAddress`:
//   true  -> allocate at the top of the block (double-stack upper side),
//   false -> allocate at the end of the 1st vector or wrap into the 2nd
//            vector (ring buffer).
// Returns true and fills *pAllocationRequest on success.
9745 #endif // #if VMA_STATS_STRING_ENABLED 9747 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9748 uint32_t currentFrameIndex,
9749 uint32_t frameInUseCount,
9750 VkDeviceSize bufferImageGranularity,
9751 VkDeviceSize allocSize,
9752 VkDeviceSize allocAlignment,
9754 VmaSuballocationType allocType,
9755 bool canMakeOtherLost,
9757 VmaAllocationRequest* pAllocationRequest)
// Preconditions: non-zero size, a real allocation type, a valid out-pointer.
9759 VMA_ASSERT(allocSize > 0);
9760 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9761 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9762 VMA_HEAVY_ASSERT(Validate());
9763 return upperAddress ?
9764 CreateAllocationRequest_UpperAddress(
9765 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9766 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
9767 CreateAllocationRequest_LowerAddress(
9768 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9769 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block, growing the 2nd
// vector downward (double-stack mode). Fails (returns false — returns not
// visible in this extraction) if the block is already a ring buffer, if the
// allocation doesn't fit, or if debug margin / alignment / granularity
// adjustments push it into the 1st vector.
9772 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9773 uint32_t currentFrameIndex,
9774 uint32_t frameInUseCount,
9775 VkDeviceSize bufferImageGranularity,
9776 VkDeviceSize allocSize,
9777 VkDeviceSize allocAlignment,
9778 VmaSuballocationType allocType,
9779 bool canMakeOtherLost,
9781 VmaAllocationRequest* pAllocationRequest)
9783 const VkDeviceSize size = GetSize();
9784 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9785 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocation is incompatible with ring-buffer usage.
9787 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9789 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9794 if(allocSize > size)
// Start just below the current top of the upper stack (or block end).
9798 VkDeviceSize resultBaseOffset = size - allocSize;
9799 if(!suballocations2nd.empty())
9801 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9802 resultBaseOffset = lastSuballoc.offset - allocSize;
// Not enough room below the existing upper stack.
9803 if(allocSize > lastSuballoc.offset)
9810 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve VMA_DEBUG_MARGIN below the neighbor above (offsets move DOWN here).
9813 if(VMA_DEBUG_MARGIN > 0)
9815 if(resultOffset < VMA_DEBUG_MARGIN)
9819 resultOffset -= VMA_DEBUG_MARGIN;
// Align downward, since we allocate from the top.
9823 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect bufferImageGranularity vs. the allocation ABOVE (next in 2nd).
9827 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9829 bool bufferImageGranularityConflict =
false;
9830 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9832 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9833 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9835 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9837 bufferImageGranularityConflict =
true;
9845 if(bufferImageGranularityConflict)
9847 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// There must still be room between the end of the 1st vector and us.
9852 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9853 suballocations1st.back().offset + suballocations1st.back().size :
9855 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity vs. the allocation BELOW (last in 1st vector).
9859 if(bufferImageGranularity > 1)
9861 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9863 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9864 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9866 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill out the request. Nothing has to be made lost on this path.
9880 pAllocationRequest->offset = resultOffset;
9881 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9882 pAllocationRequest->sumItemSize = 0;
9884 pAllocationRequest->itemsToMakeLostCount = 0;
9885 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation at a low address. Two strategies, in order:
//   1. Append at the end of the 1st vector (when the 2nd vector is empty or
//      used as a double stack) — request type EndOf1st.
//   2. Wrap around and append at the end of the 2nd vector, treating the
//      block as a ring buffer — request type EndOf2nd. On this path, with
//      canMakeOtherLost, allocations at the start of the 1st vector that can
//      become lost may be sacrificed to make room.
9892 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
9893 uint32_t currentFrameIndex,
9894 uint32_t frameInUseCount,
9895 VkDeviceSize bufferImageGranularity,
9896 VkDeviceSize allocSize,
9897 VkDeviceSize allocAlignment,
9898 VmaSuballocationType allocType,
9899 bool canMakeOtherLost,
9901 VmaAllocationRequest* pAllocationRequest)
9903 const VkDeviceSize size = GetSize();
9904 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9905 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// --- Strategy 1: append after the last item of the 1st vector. ---
9907 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9911 VkDeviceSize resultBaseOffset = 0;
9912 if(!suballocations1st.empty())
9914 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9915 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9919 VkDeviceSize resultOffset = resultBaseOffset;
// Leave debug margin after the previous allocation.
9922 if(VMA_DEBUG_MARGIN > 0)
9924 resultOffset += VMA_DEBUG_MARGIN;
9928 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// bufferImageGranularity vs. the previous allocation (end of 1st vector).
9932 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9934 bool bufferImageGranularityConflict =
false;
9935 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9937 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9938 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9940 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9942 bufferImageGranularityConflict =
true;
9950 if(bufferImageGranularityConflict)
9952 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the upper stack, or at block end.
9956 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9957 suballocations2nd.back().offset : size;
9960 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Granularity vs. allocations of the upper stack that share a page above us.
9964 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9966 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9968 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9969 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9971 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success at end of 1st vector.
9985 pAllocationRequest->offset = resultOffset;
9986 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9987 pAllocationRequest->sumItemSize = 0;
9989 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
9990 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy 2: wrap around — append at the end of the 2nd vector. ---
9997 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// Wrap-around only makes sense when the 1st vector is non-empty.
9999 VMA_ASSERT(!suballocations1st.empty());
10001 VkDeviceSize resultBaseOffset = 0;
10002 if(!suballocations2nd.empty())
10004 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10005 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10009 VkDeviceSize resultOffset = resultBaseOffset;
10012 if(VMA_DEBUG_MARGIN > 0)
10014 resultOffset += VMA_DEBUG_MARGIN;
10018 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity vs. previous allocation in the 2nd vector.
10022 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10024 bool bufferImageGranularityConflict =
false;
10025 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
10027 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
10028 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10030 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10032 bufferImageGranularityConflict =
true;
10040 if(bufferImageGranularityConflict)
10042 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10046 pAllocationRequest->itemsToMakeLostCount = 0;
10047 pAllocationRequest->sumItemSize = 0;
10048 size_t index1st = m_1stNullItemsBeginCount;
// Optionally sacrifice lost-able allocations at the front of the 1st vector
// that overlap [resultOffset, resultOffset + allocSize + margin).
10050 if(canMakeOtherLost)
10052 while(index1st < suballocations1st.size() &&
10053 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10056 const VmaSuballocation& suballoc = suballocations1st[index1st];
10057 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10063 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
// Only allocations old enough (outside the frames-in-use window) qualify.
10064 if(suballoc.hAllocation->CanBecomeLost() &&
10065 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10067 ++pAllocationRequest->itemsToMakeLostCount;
10068 pAllocationRequest->sumItemSize += suballoc.size;
// Also lose allocations that merely share a granularity page with us.
10080 if(bufferImageGranularity > 1)
10082 while(index1st < suballocations1st.size())
10084 const VmaSuballocation& suballoc = suballocations1st[index1st];
10085 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10087 if(suballoc.hAllocation != VK_NULL_HANDLE)
10090 if(suballoc.hAllocation->CanBecomeLost() &&
10091 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10093 ++pAllocationRequest->itemsToMakeLostCount;
10094 pAllocationRequest->sumItemSize += suballoc.size;
// Special case not supported: request would spill past the end of the block.
10112 if(index1st == suballocations1st.size() &&
10113 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10116 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// Fits either before block end or before the next surviving 1st-vector item.
10121 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10122 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
// Granularity vs. following allocations in the 1st vector.
10126 if(bufferImageGranularity > 1)
10128 for(
size_t nextSuballocIndex = index1st;
10129 nextSuballocIndex < suballocations1st.size();
10130 nextSuballocIndex++)
10132 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10133 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10135 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success at end of 2nd vector (ring buffer).
10149 pAllocationRequest->offset = resultOffset;
10150 pAllocationRequest->sumFreeSize =
10151 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10153 - pAllocationRequest->sumItemSize;
10154 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Actually marks as lost the allocations that CreateAllocationRequest counted
// in itemsToMakeLostCount. Walks the 1st vector from its first non-null item
// and, in ring-buffer mode, continues into the 2nd vector when the 1st is
// exhausted. Each lost item becomes a FREE suballocation and its size is
// returned to m_SumFreeSize; null-item counters are updated accordingly.
10163 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10164 uint32_t currentFrameIndex,
10165 uint32_t frameInUseCount,
10166 VmaAllocationRequest* pAllocationRequest)
// Nothing to do — trivially successful.
10168 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Losing items is only meaningful for the ring-buffer / empty-2nd layouts.
10173 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10176 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10177 size_t index = m_1stNullItemsBeginCount;
10178 size_t madeLostCount = 0;
10179 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Ran off the end of the 1st vector: switch to the 2nd (ring buffer).
10181 if(index == suballocations->size())
10185 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10187 suballocations = &AccessSuballocations2nd();
10191 VMA_ASSERT(!suballocations->empty());
10193 VmaSuballocation& suballoc = (*suballocations)[index];
10194 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10196 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10197 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10198 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the lost allocation into a free (null) item in place.
10200 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10201 suballoc.hAllocation = VK_NULL_HANDLE;
10202 m_SumFreeSize += suballoc.size;
10203 if(suballocations == &AccessSuballocations1st())
10205 ++m_1stNullItemsMiddleCount;
10209 ++m_2ndNullItemsCount;
// Compact / trim null items created above.
10221 CleanupAfterFree();
// Marks lost every allocation in this block that can become lost for the
// given frame window. Scans both suballocation vectors, converts each lost
// allocation into a free item, and returns how many were lost. Runs
// CleanupAfterFree() once at the end if anything changed.
10227 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10229 uint32_t lostAllocationCount = 0;
// 1st vector: only items after the leading run of nulls can be allocations.
10231 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10232 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10234 VmaSuballocation& suballoc = suballocations1st[i];
10235 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10236 suballoc.hAllocation->CanBecomeLost() &&
10237 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10239 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10240 suballoc.hAllocation = VK_NULL_HANDLE;
10241 ++m_1stNullItemsMiddleCount;
10242 m_SumFreeSize += suballoc.size;
10243 ++lostAllocationCount;
// 2nd vector: scan all items.
10247 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10248 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10250 VmaSuballocation& suballoc = suballocations2nd[i];
10251 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10252 suballoc.hAllocation->CanBecomeLost() &&
10253 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10255 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10256 suballoc.hAllocation = VK_NULL_HANDLE;
10257 ++m_2ndNullItemsCount;
10258 m_SumFreeSize += suballoc.size;
10259 ++lostAllocationCount;
10263 if(lostAllocationCount)
10265 CleanupAfterFree();
10268 return lostAllocationCount;
// Verifies the magic-value guard bytes written in the VMA_DEBUG_MARGIN
// region before and after every live allocation in both suballocation
// vectors. Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted
// guard found (and VK_SUCCESS otherwise — not visible in this extraction).
10271 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10273 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10274 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10276 const VmaSuballocation& suballoc = suballocations1st[i];
10277 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard region immediately before the allocation.
10279 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10281 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10282 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard region immediately after the allocation.
10284 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10286 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10287 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
10292 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10293 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10295 const VmaSuballocation& suballoc = suballocations2nd[i];
10296 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10298 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10300 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10301 return VK_ERROR_VALIDATION_FAILED_EXT;
10303 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10305 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10306 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously created allocation request: appends the new
// suballocation to the appropriate vector according to request.type and
// updates m_2ndVectorMode and m_SumFreeSize.
10314 void VmaBlockMetadata_Linear::Alloc(
10315 const VmaAllocationRequest& request,
10316 VmaSuballocationType type,
10317 VkDeviceSize allocSize,
10320 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10322 switch(request.type)
// Upper stack: push onto 2nd vector and switch it to double-stack mode.
10324 case VmaAllocationRequestType::UpperAddress:
10326 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10327 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10328 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10329 suballocations2nd.push_back(newSuballoc);
10330 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Append after the last item of the 1st vector.
10333 case VmaAllocationRequestType::EndOf1st:
10335 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// New offset must be past the current end of the 1st vector and in bounds.
10337 VMA_ASSERT(suballocations1st.empty() ||
10338 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10340 VMA_ASSERT(request.offset + allocSize <= GetSize());
10342 suballocations1st.push_back(newSuballoc);
// Ring-buffer wrap-around: append to 2nd vector, below the 1st vector start.
10345 case VmaAllocationRequestType::EndOf2nd:
10347 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10349 VMA_ASSERT(!suballocations1st.empty() &&
10350 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10351 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10353 switch(m_2ndVectorMode)
// First wrapped allocation turns the empty 2nd vector into a ring buffer.
10355 case SECOND_VECTOR_EMPTY:
10357 VMA_ASSERT(suballocations2nd.empty());
10358 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10360 case SECOND_VECTOR_RING_BUFFER:
10362 VMA_ASSERT(!suballocations2nd.empty());
10364 case SECOND_VECTOR_DOUBLE_STACK:
10365 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10371 suballocations2nd.push_back(newSuballoc);
10375 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10378 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset() with its offset.
10381 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10383 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at `offset`. Fast paths first (first item of the
// 1st vector, last item of either vector), then binary search by offset in
// the sorted interiors of both vectors. Asserts if the offset is not found.
// Every path returns the size to m_SumFreeSize and runs CleanupAfterFree().
10386 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10388 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10389 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10391 if(!suballocations1st.empty())
// Fast path: first live item of the 1st vector — just extend the null run.
10394 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10395 if(firstSuballoc.offset == offset)
10397 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10398 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10399 m_SumFreeSize += firstSuballoc.size;
10400 ++m_1stNullItemsBeginCount;
10401 CleanupAfterFree();
// Fast path: last item of the 2nd vector — pop it.
10407 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10408 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10410 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10411 if(lastSuballoc.offset == offset)
10413 m_SumFreeSize += lastSuballoc.size;
10414 suballocations2nd.pop_back();
10415 CleanupAfterFree();
// Fast path: last item of the 1st vector (no 2nd vector in use) — pop it.
10420 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10422 VmaSuballocation& lastSuballoc = suballocations1st.back();
10423 if(lastSuballoc.offset == offset)
10425 m_SumFreeSize += lastSuballoc.size;
10426 suballocations1st.pop_back();
10427 CleanupAfterFree();
// Slow path: binary search the interior of the 1st vector (sorted by
// ascending offset past the leading null run).
10434 VmaSuballocation refSuballoc;
10435 refSuballoc.offset = offset;
10437 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
10438 suballocations1st.begin() + m_1stNullItemsBeginCount,
10439 suballocations1st.end(),
10441 VmaSuballocationOffsetLess());
10442 if(it != suballocations1st.end())
// Mark as a middle null item; compaction happens in CleanupAfterFree().
10444 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10445 it->hAllocation = VK_NULL_HANDLE;
10446 ++m_1stNullItemsMiddleCount;
10447 m_SumFreeSize += it->size;
10448 CleanupAfterFree();
10453 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
// 2nd vector sort order depends on mode: ascending offsets for ring buffer,
// descending for double stack — pick the matching comparator.
10456 VmaSuballocation refSuballoc;
10457 refSuballoc.offset = offset;
10459 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10460 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
10461 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
10462 if(it != suballocations2nd.end())
10464 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10465 it->hAllocation = VK_NULL_HANDLE;
10466 ++m_2ndNullItemsCount;
10467 m_SumFreeSize += it->size;
10468 CleanupAfterFree();
// Nothing matched — caller passed an offset this block does not hold.
10473 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic: compact the 1st vector when it has more than 32 items and null
// (freed) items are at least 1.5x the live items (nulls*2 >= live*3).
10476 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10478 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10479 const size_t suballocCount = AccessSuballocations1st().size();
10480 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping after any free/lost operation: trims null items from the
// edges of both vectors, optionally compacts the 1st vector, and when the
// 1st vector drains while the 2nd was a ring buffer, promotes the 2nd vector
// to become the new 1st (m_1stVectorIndex ^= 1).
10483 void VmaBlockMetadata_Linear::CleanupAfterFree()
10485 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10486 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// (Condition elided in extraction) Block became completely empty: reset all.
10490 suballocations1st.clear();
10491 suballocations2nd.clear();
10492 m_1stNullItemsBeginCount = 0;
10493 m_1stNullItemsMiddleCount = 0;
10494 m_2ndNullItemsCount = 0;
10495 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10499 const size_t suballoc1stCount = suballocations1st.size();
10500 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10501 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the leading null run of the 1st vector over adjacent middle nulls.
10504 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10505 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10507 ++m_1stNullItemsBeginCount;
10508 --m_1stNullItemsMiddleCount;
// Pop trailing nulls from the 1st vector.
10512 while(m_1stNullItemsMiddleCount > 0 &&
10513 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10515 --m_1stNullItemsMiddleCount;
10516 suballocations1st.pop_back();
// Pop trailing nulls from the 2nd vector.
10520 while(m_2ndNullItemsCount > 0 &&
10521 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10523 --m_2ndNullItemsCount;
10524 suballocations2nd.pop_back();
// Remove leading nulls from the 2nd vector (O(n) vector remove at index 0).
10528 while(m_2ndNullItemsCount > 0 &&
10529 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10531 --m_2ndNullItemsCount;
10532 VmaVectorRemove(suballocations2nd, 0);
// Full compaction of the 1st vector when the null ratio is too high.
10535 if(ShouldCompact1st())
10537 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10538 size_t srcIndex = m_1stNullItemsBeginCount;
10539 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10541 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10545 if(dstIndex != srcIndex)
10547 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10551 suballocations1st.resize(nonNullItemCount);
10552 m_1stNullItemsBeginCount = 0;
10553 m_1stNullItemsMiddleCount = 0;
// 2nd vector drained: revert to single-vector mode.
10557 if(suballocations2nd.empty())
10559 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector drained: the ring-buffer 2nd vector becomes the new 1st.
10563 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10565 suballocations1st.clear();
10566 m_1stNullItemsBeginCount = 0;
10568 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// Swap roles: transfer null counters and re-derive the leading null run.
10571 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10572 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10573 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10574 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10576 ++m_1stNullItemsBeginCount;
10577 --m_1stNullItemsMiddleCount;
10579 m_2ndNullItemsCount = 0;
// Toggle which internal vector is "1st".
10580 m_1stVectorIndex ^= 1;
10585 VMA_HEAVY_ASSERT(Validate());
// Buddy-allocator metadata constructor: zero allocation count and clear the
// per-level free lists. Real setup happens in Init().
10592 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10593 VmaBlockMetadata(hAllocator),
10595 m_AllocationCount(0),
10599 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees the whole buddy tree starting at the root.
10602 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10604 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of `size` bytes. The usable
// size is rounded DOWN to a power of two (the remainder is unusable), the
// level count is derived from MIN_NODE_SIZE/MAX_LEVELS, and a single free
// root node covering the whole usable size is put on free list level 0.
10607 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10609 VmaBlockMetadata::Init(size);
10611 m_UsableSize = VmaPrevPow2(size);
10612 m_SumFreeSize = m_UsableSize;
// Count levels until nodes would drop below the minimum node size.
10616 while(m_LevelCount < MAX_LEVELS &&
10617 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10622 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10623 rootNode->offset = 0;
10624 rootNode->type = Node::TYPE_FREE;
10625 rootNode->parent = VMA_NULL;
10626 rootNode->buddy = VMA_NULL;
10629 AddToFreeListFront(0, rootNode);
// Consistency check: validates the whole node tree, cross-checks the
// allocation count and sum of free sizes against recomputed values, and
// verifies every free list is a well-formed doubly-linked list of FREE nodes
// (unused levels must be empty).
10632 bool VmaBlockMetadata_Buddy::Validate()
const 10635 ValidationContext ctx;
10636 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10638 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10640 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10641 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Check free-list link integrity per level.
10644 for(uint32_t level = 0; level < m_LevelCount; ++level)
10646 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10647 m_FreeList[level].front->free.prev == VMA_NULL);
10649 for(Node* node = m_FreeList[level].front;
10651 node = node->free.next)
10653 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10655 if(node->free.next == VMA_NULL)
10657 VMA_VALIDATE(m_FreeList[level].back == node);
10661 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must hold no nodes.
10667 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10669 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest free range = node size of the shallowest level whose free list is
// non-empty (levels are scanned from largest node size down).
10675 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10677 for(uint32_t level = 0; level < m_LevelCount; ++level)
10679 if(m_FreeList[level].front != VMA_NULL)
10681 return LevelToNodeSize(level);
// Fills VmaStatInfo by recursing over the node tree; the tail of the
// power-of-two rounding (unusableSize) is accounted separately when present.
10687 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10689 const VkDeviceSize unusableSize = GetUnusableSize();
10700 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10702 if(unusableSize > 0)
// Accumulates this block's totals into pool statistics. The unusable tail
// (size - usable power-of-two size) is reported as unused space.
10711 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10713 const VkDeviceSize unusableSize = GetUnusableSize();
10715 inoutStats.
size += GetSize();
10716 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10721 if(unusableSize > 0)
// JSON dump of the buddy block: gathers stats first for the header, then
// recursively prints every node; the unusable tail is emitted as one final
// unused range.
10728 #if VMA_STATS_STRING_ENABLED 10730 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10734 CalcAllocationStatInfo(stat);
10736 PrintDetailedMap_Begin(
10742 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10744 const VkDeviceSize unusableSize = GetUnusableSize();
10745 if(unusableSize > 0)
10747 PrintDetailedMap_UnusedRange(json,
10752 PrintDetailedMap_End(json);
// Buddy allocator allocation request. Upper-address allocation is not
// supported. For allocation types whose neighbors could conflict on a
// granularity page, both alignment and size are conservatively raised to
// bufferImageGranularity so each allocation owns whole pages. Then the free
// lists are searched from the target level up toward larger nodes for the
// first suitably aligned free node; the found level is stashed in
// customData for Alloc() to split down from.
10755 #endif // #if VMA_STATS_STRING_ENABLED 10757 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10758 uint32_t currentFrameIndex,
10759 uint32_t frameInUseCount,
10760 VkDeviceSize bufferImageGranularity,
10761 VkDeviceSize allocSize,
10762 VkDeviceSize allocAlignment,
10764 VmaSuballocationType allocType,
10765 bool canMakeOtherLost,
10767 VmaAllocationRequest* pAllocationRequest)
10769 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservative granularity handling: pad alignment and size to page size.
10773 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10774 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10775 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10777 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10778 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10781 if(allocSize > m_UsableSize)
// Search from the deepest level that fits, upward (level-- loop).
10786 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10787 for(uint32_t level = targetLevel + 1; level--; )
10789 for(Node* freeNode = m_FreeList[level].front;
10790 freeNode != VMA_NULL;
10791 freeNode = freeNode->free.next)
// Node offsets are naturally power-of-two aligned; check requested alignment.
10793 if(freeNode->offset % allocAlignment == 0)
10795 pAllocationRequest->type = VmaAllocationRequestType::Normal;
10796 pAllocationRequest->offset = freeNode->offset;
10797 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10798 pAllocationRequest->sumItemSize = 0;
10799 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the free node was found on, for Alloc().
10800 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy allocator never proposes making other allocations lost, so this
// succeeds only for requests with itemsToMakeLostCount == 0.
10809 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10810 uint32_t currentFrameIndex,
10811 uint32_t frameInUseCount,
10812 VmaAllocationRequest* pAllocationRequest)
10818 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are not supported by the buddy algorithm; body elided in
// this extraction (presumably returns 0 — TODO confirm against full source).
10821 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation: finds the free node chosen by
// CreateAllocationRequest (level in request.customData, matched by offset),
// repeatedly splits it into buddy pairs until reaching the target level,
// then converts the final node to an allocation and updates the counters.
10830 void VmaBlockMetadata_Buddy::Alloc(
10831 const VmaAllocationRequest& request,
10832 VmaSuballocationType type,
10833 VkDeviceSize allocSize,
10836 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
10838 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10839 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Locate the node with the requested offset on that level's free list.
10841 Node* currNode = m_FreeList[currLevel].front;
10842 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10843 while(currNode->offset != request.offset)
10845 currNode = currNode->free.next;
10846 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down until the node size matches the allocation's level.
10850 while(currLevel < targetLevel)
// The node being split leaves the free list; its children enter one below.
10854 RemoveFromFreeList(currLevel, currNode);
10856 const uint32_t childrenLevel = currLevel + 1;
10859 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10860 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10862 leftChild->offset = currNode->offset;
10863 leftChild->type = Node::TYPE_FREE;
10864 leftChild->parent = currNode;
10865 leftChild->buddy = rightChild;
10867 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10868 rightChild->type = Node::TYPE_FREE;
10869 rightChild->parent = currNode;
10870 rightChild->buddy = leftChild;
10873 currNode->type = Node::TYPE_SPLIT;
10874 currNode->split.leftChild = leftChild;
// Push right first so the left child ends up at the front (lower offset).
10877 AddToFreeListFront(childrenLevel, rightChild);
10878 AddToFreeListFront(childrenLevel, leftChild);
10883 currNode = m_FreeList[currLevel].front;
10892 VMA_ASSERT(currLevel == targetLevel &&
10893 currNode != VMA_NULL &&
10894 currNode->type == Node::TYPE_FREE);
10895 RemoveFromFreeList(currLevel, currNode);
10898 currNode->type = Node::TYPE_ALLOCATION;
10899 currNode->allocation.alloc = hAllocation;
10901 ++m_AllocationCount;
// Note: full node size may exceed allocSize; only allocSize is subtracted
// here — Validate() reconciles via LevelToNodeSize - GetSize() per node.
10903 m_SumFreeSize -= allocSize;
// Recursively deletes a node and, for split nodes, both of its children
// (the right child is reached through the left child's buddy pointer).
10906 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10908 if(node->type == Node::TYPE_SPLIT)
10910 DeleteNode(node->split.leftChild->buddy);
10911 DeleteNode(node->split.leftChild);
10914 vma_delete(GetAllocationCallbacks(), node);
// Recursive node validation: checks parent/buddy invariants, then per type:
// FREE contributes its node size to the calculated free total; ALLOCATION is
// counted and its slack (node size minus allocation size) is free; SPLIT
// recurses into both children, verifying their offsets.
10917 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10919 VMA_VALIDATE(level < m_LevelCount);
10920 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy; buddies must point at each other.
10921 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10922 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10925 case Node::TYPE_FREE:
10927 ctx.calculatedSumFreeSize += levelNodeSize;
10928 ++ctx.calculatedFreeCount;
10930 case Node::TYPE_ALLOCATION:
10931 ++ctx.calculatedAllocationCount;
// Slack between node size and actual allocation size counts as free.
10932 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10933 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10935 case Node::TYPE_SPLIT:
10937 const uint32_t childrenLevel = level + 1;
10938 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10939 const Node*
const leftChild = curr->split.leftChild;
10940 VMA_VALIDATE(leftChild != VMA_NULL);
// Left child shares the parent's offset; right child is half a node later.
10941 VMA_VALIDATE(leftChild->offset == curr->offset);
10942 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10944 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10946 const Node*
const rightChild = leftChild->buddy;
10947 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10948 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10950 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest buddy level whose node size can
// still hold it: starts at the root level (m_UsableSize) and descends while
// the next, half-sized level still fits allocSize.
// NOTE(review): the loop body's level increment and the final return were
// lost in extraction of this chunk -- confirm against the full source.
10961 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10964 uint32_t level = 0;
10965 VkDeviceSize currLevelNodeSize = m_UsableSize;
10966 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
// Each level halves the node size; stop before exceeding m_LevelCount.
10967 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10970 currLevelNodeSize = nextLevelNodeSize;
10971 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at `offset`: walks the buddy tree from the root,
// choosing left/right child by which half contains the offset, marks the
// found leaf free, updates counters, then merges adjacent free buddies
// upward into their parent while possible.
10976 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10979 Node* node = m_Root;
10980 VkDeviceSize nodeOffset = 0;
10981 uint32_t level = 0;
10982 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
10983 while(node->type == Node::TYPE_SPLIT)
10985 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
// Left half holds offsets below nodeOffset + nextLevelSize.
10986 if(offset < nodeOffset + nextLevelSize)
10988 node = node->split.leftChild;
10992 node = node->split.leftChild->buddy;
10993 nodeOffset += nextLevelSize;
10996 levelNodeSize = nextLevelSize;
// alloc == VK_NULL_HANDLE is allowed here (free-by-offset path).
10999 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
11000 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
11003 --m_AllocationCount;
11004 m_SumFreeSize += alloc->GetSize();
11006 node->type = Node::TYPE_FREE;
// Merge loop: while the buddy is also free, collapse both children back into
// the parent node. NOTE(review): the level decrement and node-to-parent
// reassignment were lost in extraction of this chunk.
11009 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
11011 RemoveFromFreeList(level, node->buddy);
11012 Node*
const parent = node->parent;
11014 vma_delete(GetAllocationCallbacks(), node->buddy);
11015 vma_delete(GetAllocationCallbacks(), node);
11016 parent->type = Node::TYPE_FREE;
11024 AddToFreeListFront(level, node);
// Accumulates statistics for one buddy-tree node into outInfo, recursing
// into both children of a split node. NOTE(review): the switch header and
// the VmaAddStatInfo*-style accumulation lines were lost in extraction.
11027 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 11031 case Node::TYPE_FREE:
11037 case Node::TYPE_ALLOCATION:
11039 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Any space in the node beyond the allocation size is an unused range.
11045 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11046 if(unusedRangeSize > 0)
11055 case Node::TYPE_SPLIT:
11057 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11058 const Node*
const leftChild = node->split.leftChild;
11059 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11060 const Node*
const rightChild = leftChild->buddy;
11061 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a free node onto the front of the doubly-linked free list for the
// given level, maintaining both front and back pointers.
11069 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11071 VMA_ASSERT(node->type == Node::TYPE_FREE);
11074 Node*
const frontNode = m_FreeList[level].front;
// Empty list: node becomes both front and back.
11075 if(frontNode == VMA_NULL)
11077 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11078 node->free.prev = node->free.next = VMA_NULL;
11079 m_FreeList[level].front = m_FreeList[level].back = node;
// Non-empty list: link node in front of the current front node.
11083 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11084 node->free.prev = VMA_NULL;
11085 node->free.next = frontNode;
11086 frontNode->free.prev = node;
11087 m_FreeList[level].front = node;
// Unlinks a node from the doubly-linked free list for the given level,
// fixing the list's front/back pointers or the neighbours' links.
11091 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11093 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Node is the front of the list: advance the front pointer.
11096 if(node->free.prev == VMA_NULL)
11098 VMA_ASSERT(m_FreeList[level].front == node);
11099 m_FreeList[level].front = node->free.next;
11103 Node*
const prevFreeNode = node->free.prev;
11104 VMA_ASSERT(prevFreeNode->free.next == node);
11105 prevFreeNode->free.next = node->free.next;
// Node is the back of the list: retreat the back pointer.
11109 if(node->free.next == VMA_NULL)
11111 VMA_ASSERT(m_FreeList[level].back == node);
11112 m_FreeList[level].back = node->free.prev;
11116 Node*
const nextFreeNode = node->free.next;
11117 VMA_ASSERT(nextFreeNode->free.prev == node);
11118 nextFreeNode->free.prev = node->free.prev;
// Emits the JSON description of one buddy-tree node (free range, allocation
// plus trailing padding, or both children of a split), compiled only when
// VMA_STATS_STRING_ENABLED.
11122 #if VMA_STATS_STRING_ENABLED 11123 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 11127 case Node::TYPE_FREE:
11128 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11130 case Node::TYPE_ALLOCATION:
11132 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11133 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report internal fragmentation after the allocation as an unused range.
11134 if(allocSize < levelNodeSize)
11136 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11140 case Node::TYPE_SPLIT:
11142 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11143 const Node*
const leftChild = node->split.leftChild;
11144 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11145 const Node*
const rightChild = leftChild->buddy;
11146 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: initializes all members to empty/invalid values; real setup
// happens later in Init(). Some initializer entries were lost in extraction.
11153 #endif // #if VMA_STATS_STRING_ENABLED 11159 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11160 m_pMetadata(VMA_NULL),
11161 m_MemoryTypeIndex(UINT32_MAX),
11163 m_hMemory(VK_NULL_HANDLE),
11165 m_pMappedData(VMA_NULL)
// Initializes the block with freshly allocated VkDeviceMemory and creates
// the metadata object matching the requested allocation algorithm
// (linear / buddy / generic). NOTE(review): the switch/if selecting between
// the three metadata classes was lost in extraction; only the three
// vma_new(...) branches remain visible.
11169 void VmaDeviceMemoryBlock::Init(
11172 uint32_t newMemoryTypeIndex,
11173 VkDeviceMemory newMemory,
11174 VkDeviceSize newSize,
11176 uint32_t algorithm)
// Init must only run once per block.
11178 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11180 m_hParentPool = hParentPool;
11181 m_MemoryTypeIndex = newMemoryTypeIndex;
11183 m_hMemory = newMemory;
11188 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11191 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11197 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11199 m_pMetadata->Init(newSize);
// Releases the block's VkDeviceMemory back through the allocator and deletes
// the metadata object. The block must be empty (all suballocations freed).
11202 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11206 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11208 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11209 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11210 m_hMemory = VK_NULL_HANDLE;
11212 vma_delete(allocator, m_pMetadata);
11213 m_pMetadata = VMA_NULL;
// Sanity check: the block must hold live device memory of nonzero size;
// delegates the detailed check to the metadata object.
11216 bool VmaDeviceMemoryBlock::Validate()
const 11218 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11219 (m_pMetadata->GetSize() != 0));
11221 return m_pMetadata->Validate();
// Maps the block, asks the metadata to scan for corrupted margin magic
// values, then unmaps. Returns the metadata's result, or the Map() error.
11224 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11226 void* pData =
nullptr;
11227 VkResult res = Map(hAllocator, 1, &pData);
11228 if(res != VK_SUCCESS)
11233 res = m_pMetadata->CheckCorruption(pData);
// Balance the Map() above regardless of the check's outcome.
11235 Unmap(hAllocator, 1);
// Reference-counted map of the block's memory. If already mapped, only the
// counter is bumped and the cached pointer returned; otherwise vkMapMemory
// is called. Guarded by the block's own mutex (when mutexes are enabled).
// NOTE(review): the vkMapMemory argument lines and the result handling for
// m_pMappedData assignment were lost in extraction of this chunk.
11240 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11247 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Fast path: already mapped -- just add to the reference count.
11248 if(m_MapCount != 0)
11250 m_MapCount += count;
11251 VMA_ASSERT(m_pMappedData != VMA_NULL);
11252 if(ppData != VMA_NULL)
11254 *ppData = m_pMappedData;
11260 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11261 hAllocator->m_hDevice,
11267 if(result == VK_SUCCESS)
11269 if(ppData != VMA_NULL)
11271 *ppData = m_pMappedData;
11273 m_MapCount = count;
// Reference-counted unmap: decrements the map counter and calls
// vkUnmapMemory only when it reaches zero. Unbalanced calls trigger an
// assert in debug builds (and are ignored in release).
11279 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11286 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11287 if(m_MapCount >= count)
11289 m_MapCount -= count;
11290 if(m_MapCount == 0)
11292 m_pMappedData = VMA_NULL;
11293 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11298 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Debug-corruption support: maps the block and writes the magic sentinel
// just before (at allocOffset - VMA_DEBUG_MARGIN) and just after
// (at allocOffset + allocSize) the allocation, then unmaps.
11302 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
// Only meaningful when margins and corruption detection are compiled in.
11304 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11305 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11308 VkResult res = Map(hAllocator, 1, &pData);
11309 if(res != VK_SUCCESS)
11314 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11315 VmaWriteMagicValue(pData, allocOffset + allocSize);
11317 Unmap(hAllocator, 1);
// Counterpart of WriteMagicValueAroundAllocation: maps the block and checks
// that the magic sentinels before and after the allocation are intact,
// asserting loudly on corruption, then unmaps.
11322 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11324 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11325 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11328 VkResult res = Map(hAllocator, 1, &pData);
11329 if(res != VK_SUCCESS)
// A trashed leading sentinel means something wrote before the allocation.
11334 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11336 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11338 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11340 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11343 Unmap(hAllocator, 1);
// Binds a buffer to this block's VkDeviceMemory at the allocation's offset.
// The mutex serializes vkBindBufferMemory calls on the same memory object,
// as required by the Vulkan spec for multithreaded use.
11348 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11353 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11354 hAllocation->GetBlock() ==
this);
11356 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11357 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11358 hAllocator->m_hDevice,
11361 hAllocation->GetOffset());
// Image counterpart of BindBufferMemory: binds an image to this block's
// memory at the allocation's offset, serialized by the block mutex.
11364 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11369 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11370 hAllocation->GetBlock() ==
this);
11372 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11373 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11374 hAllocator->m_hDevice,
11377 hAllocation->GetOffset());
// Fragments of the VmaStatInfo helper functions. The memset zero-initializes
// an output stat structure; VmaPostprocessCalcStatInfo finalizes accumulated
// stats. NOTE(review): the enclosing function signature for the memset and
// the body of VmaPostprocessCalcStatInfo were lost in extraction.
11382 memset(&outInfo, 0,
sizeof(outInfo));
11401 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards pool creation parameters into the embedded
// VmaBlockVector. A blockSize of 0 means "use the allocator's preferred
// block size"; a nonzero blockSize also marks the size as explicit.
// NOTE(review): several initializer-list entries and the destructor body
// were lost in extraction of this chunk.
11409 VmaPool_T::VmaPool_T(
11412 VkDeviceSize preferredBlockSize) :
11416 createInfo.memoryTypeIndex,
11417 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11418 createInfo.minBlockCount,
11419 createInfo.maxBlockCount,
11421 createInfo.frameInUseCount,
11423 createInfo.blockSize != 0,
11429 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores all configuration (memory type, block
// size limits, granularity, algorithm) and creates an empty block list using
// the allocator's allocation callbacks. Blocks are created lazily.
11433 #if VMA_STATS_STRING_ENABLED 11435 #endif // #if VMA_STATS_STRING_ENABLED 11437 VmaBlockVector::VmaBlockVector(
11440 uint32_t memoryTypeIndex,
11441 VkDeviceSize preferredBlockSize,
11442 size_t minBlockCount,
11443 size_t maxBlockCount,
11444 VkDeviceSize bufferImageGranularity,
11445 uint32_t frameInUseCount,
11447 bool explicitBlockSize,
11448 uint32_t algorithm) :
11449 m_hAllocator(hAllocator),
11450 m_hParentPool(hParentPool),
11451 m_MemoryTypeIndex(memoryTypeIndex),
11452 m_PreferredBlockSize(preferredBlockSize),
11453 m_MinBlockCount(minBlockCount),
11454 m_MaxBlockCount(maxBlockCount),
11455 m_BufferImageGranularity(bufferImageGranularity),
11456 m_FrameInUseCount(frameInUseCount),
11457 m_IsCustomPool(isCustomPool),
11458 m_ExplicitBlockSize(explicitBlockSize),
11459 m_Algorithm(algorithm),
11460 m_HasEmptyBlock(false),
11461 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
// Destructor: destroys and deletes every remaining block, iterating
// backwards so indices stay valid as the vector is logically drained.
11466 VmaBlockVector::~VmaBlockVector()
11468 for(
size_t i = m_Blocks.size(); i--; )
11470 m_Blocks[i]->Destroy(m_hAllocator);
11471 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, bailing out on
// the first failure. NOTE(review): the early-return of `res` and final
// return value were lost in extraction of this chunk.
11475 VkResult VmaBlockVector::CreateMinBlocks()
11477 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11479 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11480 if(res != VK_SUCCESS)
// Fills pStats by accumulating metadata statistics from every block, under a
// shared (read) lock. NOTE(review): the pStats field initialization lines
// were lost in extraction of this chunk.
11488 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11490 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11492 const size_t blockCount = m_Blocks.size();
11501 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11503 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11504 VMA_ASSERT(pBlock);
11505 VMA_HEAVY_ASSERT(pBlock->Validate());
11506 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection requires the feature compiled in, a nonzero debug
// margin, and memory that is both HOST_VISIBLE and HOST_COHERENT (so margins
// can be written/read through a mapping without explicit flushes).
11510 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11512 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11513 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11514 (VMA_DEBUG_MARGIN > 0) &&
11516 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retries of the make-allocations-lost strategy in
// AllocatePage before giving up with VK_ERROR_TOO_MANY_OBJECTS.
11519 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` pages by calling AllocatePage in a loop under
// a write lock. On any failure, all pages allocated so far are freed and the
// output array is zeroed, so the call is all-or-nothing.
11521 VkResult VmaBlockVector::Allocate(
11522 uint32_t currentFrameIndex,
11524 VkDeviceSize alignment,
11526 VmaSuballocationType suballocType,
11527 size_t allocationCount,
11531 VkResult res = VK_SUCCESS;
// With corruption detection, size/alignment are rounded up so the magic
// sentinels stay 4-byte aligned.
11533 if(IsCorruptionDetectionEnabled())
11535 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11536 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11540 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11541 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11543 res = AllocatePage(
11549 pAllocations + allocIndex);
11550 if(res != VK_SUCCESS)
// Roll back: free the pages that did succeed, then clear the output array.
11557 if(res != VK_SUCCESS)
11560 while(allocIndex--)
11562 Free(pAllocations[allocIndex]);
11564 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single page (suballocation). Strategy, in order:
//  1. Try existing blocks (last block first, then forward or backward scan
//     depending on strategy flags).
//  2. Create a new block, shrinking the candidate size up to
//     NEW_BLOCK_SIZE_SHIFT_MAX times (1/2, 1/4, 1/8) when allowed.
//  3. If CAN_MAKE_OTHER_LOST is allowed, repeatedly pick the cheapest
//     make-lost request across blocks, up to VMA_ALLOCATION_TRY_COUNT tries.
// NOTE(review): many argument lists and several condition lines were lost in
// extraction of this chunk; code left byte-identical, comments only.
11570 VkResult VmaBlockVector::AllocatePage(
11571 uint32_t currentFrameIndex,
11573 VkDeviceSize alignment,
11575 VmaSuballocationType suballocType,
11582 const bool canCreateNewBlock =
11584 (m_Blocks.size() < m_MaxBlockCount);
11591 canMakeOtherLost =
false;
// Upper-address allocation is a linear-allocator feature only.
11595 if(isUpperAddress &&
11598 return VK_ERROR_FEATURE_NOT_PRESENT;
11612 return VK_ERROR_FEATURE_NOT_PRESENT;
// An allocation (plus both debug margins) larger than a whole block can
// never succeed.
11616 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11618 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11626 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Fast path: try the last (most recently used) block first.
11635 if(!m_Blocks.empty())
11637 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11638 VMA_ASSERT(pCurrBlock);
11639 VkResult res = AllocateFromBlock(
11649 if(res == VK_SUCCESS)
11651 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b. Forward scan (prefers larger/older blocks first).
11661 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11663 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11664 VMA_ASSERT(pCurrBlock);
11665 VkResult res = AllocateFromBlock(
11675 if(res == VK_SUCCESS)
11677 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c. Backward scan (alternative strategy ordering).
11685 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11687 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11688 VMA_ASSERT(pCurrBlock);
11689 VkResult res = AllocateFromBlock(
11699 if(res == VK_SUCCESS)
11701 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. Create a new block.
11709 if(canCreateNewBlock)
11712 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11713 uint32_t newBlockSizeShift = 0;
11714 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic: if no block is larger yet, start with a smaller block to avoid
// overcommitting memory for small pools.
11716 if(!m_ExplicitBlockSize)
11719 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11720 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11722 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11723 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11725 newBlockSize = smallerNewBlockSize;
11726 ++newBlockSizeShift;
11735 size_t newBlockIndex = 0;
11736 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On VK_ERROR_OUT_OF_*_MEMORY, retry with progressively halved block sizes.
11738 if(!m_ExplicitBlockSize)
11740 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11742 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11743 if(smallerNewBlockSize >= size)
11745 newBlockSize = smallerNewBlockSize;
11746 ++newBlockSizeShift;
11747 res = CreateBlock(newBlockSize, &newBlockIndex);
11756 if(res == VK_SUCCESS)
11758 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11759 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11761 res = AllocateFromBlock(
11771 if(res == VK_SUCCESS)
11773 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11779 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Make other allocations lost to create room.
11786 if(canMakeOtherLost)
11788 uint32_t tryIndex = 0;
11789 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11791 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11792 VmaAllocationRequest bestRequest = {};
11793 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: pick the request with the smallest make-lost cost.
11799 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11801 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11802 VMA_ASSERT(pCurrBlock);
11803 VmaAllocationRequest currRequest = {};
11804 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11807 m_BufferImageGranularity,
11816 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11817 if(pBestRequestBlock == VMA_NULL ||
11818 currRequestCost < bestRequestCost)
11820 pBestRequestBlock = pCurrBlock;
11821 bestRequest = currRequest;
11822 bestRequestCost = currRequestCost;
// Cost 0 means nothing has to be made lost -- cannot do better.
11824 if(bestRequestCost == 0)
// Backward scan variant of the same search.
11835 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11837 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11838 VMA_ASSERT(pCurrBlock);
11839 VmaAllocationRequest currRequest = {};
11840 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11843 m_BufferImageGranularity,
11852 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11853 if(pBestRequestBlock == VMA_NULL ||
11854 currRequestCost < bestRequestCost ||
11857 pBestRequestBlock = pCurrBlock;
11858 bestRequest = currRequest;
11859 bestRequestCost = currRequestCost;
11861 if(bestRequestCost == 0 ||
11871 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations need the block mapped up front.
11875 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11876 if(res != VK_SUCCESS)
11882 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11888 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11890 m_HasEmptyBlock =
false;
// Commit the allocation into the chosen block.
11893 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11894 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11895 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
11896 (*pAllocation)->InitBlockAllocation(
11898 bestRequest.offset,
11904 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11905 VMA_DEBUG_LOG(
" Returned from existing block");
11906 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11907 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11909 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11911 if(IsCorruptionDetectionEnabled())
11913 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11914 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Exhausted all retries: other threads keep touching the allocations we try
// to make lost.
11929 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11931 return VK_ERROR_TOO_MANY_OBJECTS;
11935 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a block-type allocation: validates corruption sentinels, unmaps a
// persistent mapping, returns the range to the block's metadata, and applies
// the "keep at most one empty block" policy. The actual VkDeviceMemory
// release (if a block became redundant) happens after dropping the mutex.
11938 void VmaBlockVector::Free(
11941 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
11945 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11947 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11949 if(IsCorruptionDetectionEnabled())
11951 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11952 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Balance the Map() done for persistently-mapped allocations.
11955 if(hAllocation->IsPersistentMap())
11957 pBlock->Unmap(m_hAllocator, 1);
11960 pBlock->m_pMetadata->Free(hAllocation);
11961 VMA_HEAVY_ASSERT(pBlock->Validate());
11963 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
// If this free emptied the block: keep one empty block cached, delete any
// second one (respecting m_MinBlockCount).
11966 if(pBlock->m_pMetadata->IsEmpty())
11969 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11971 pBlockToDelete = pBlock;
11977 m_HasEmptyBlock =
true;
// Otherwise, if an empty block exists and sits at the end, it is redundant.
11982 else if(m_HasEmptyBlock)
11984 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11985 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11987 pBlockToDelete = pLastBlock;
11988 m_Blocks.pop_back();
11989 m_HasEmptyBlock =
false;
11993 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is deliberately done outside the lock.
11998 if(pBlockToDelete != VMA_NULL)
12000 VMA_DEBUG_LOG(
" Deleted empty allocation");
12001 pBlockToDelete->Destroy(m_hAllocator);
12002 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning backwards and
// stopping early once the preferred block size is reached (no block can
// usefully exceed it for the shrink heuristic in AllocatePage).
12006 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 12008 VkDeviceSize result = 0;
12009 for(
size_t i = m_Blocks.size(); i--; )
12011 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
12012 if(result >= m_PreferredBlockSize)
// Removes (but does not destroy) the given block from the vector by linear
// search; used by defragmentation when a block's ownership is transferred.
12020 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
12022 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12024 if(m_Blocks[blockIndex] == pBlock)
12026 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass that nudges blocks toward ascending order by free
// space, so blocks with the least free space are tried first on allocation.
// A single pass per call amortizes the sorting cost across frees.
12033 void VmaBlockVector::IncrementallySortBlocks()
12038 for(
size_t i = 1; i < m_Blocks.size(); ++i)
12040 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
12042 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts the allocation inside one specific block: asks the metadata for a
// request (no make-lost allowed here), maps if persistently mapped, creates
// the VmaAllocation object, and applies debug fill / corruption sentinels.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when the block has no fitting space.
// NOTE(review): the CreateAllocationRequest/InitBlockAllocation argument
// lists were partially lost in extraction of this chunk.
12049 VkResult VmaBlockVector::AllocateFromBlock(
12050 VmaDeviceMemoryBlock* pBlock,
12051 uint32_t currentFrameIndex,
12053 VkDeviceSize alignment,
12056 VmaSuballocationType suballocType,
12065 VmaAllocationRequest currRequest = {};
12066 if(pBlock->m_pMetadata->CreateAllocationRequest(
12069 m_BufferImageGranularity,
// This path never makes other allocations lost.
12079 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12083 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12084 if(res != VK_SUCCESS)
// The block is about to receive an allocation, so it is no longer empty.
12091 if(pBlock->m_pMetadata->IsEmpty())
12093 m_HasEmptyBlock =
false;
12096 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12097 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12098 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12099 (*pAllocation)->InitBlockAllocation(
12101 currRequest.offset,
12107 VMA_HEAVY_ASSERT(pBlock->Validate());
12108 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12109 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12111 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12113 if(IsCorruptionDetectionEnabled())
12115 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12116 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12120 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// reports its index. NOTE(review): the error-return on failed
// AllocateVulkanMemory and the pBlock->Init argument list were partially
// lost in extraction of this chunk.
12123 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12125 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12126 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12127 allocInfo.allocationSize = blockSize;
12128 VkDeviceMemory mem = VK_NULL_HANDLE;
12129 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12138 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12144 allocInfo.allocationSize,
12148 m_Blocks.push_back(pBlock);
12149 if(pNewBlockIndex != VMA_NULL)
12151 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU via memcpy through mapped
// pointers. Steps: mark blocks touched by any move, map them (remembering
// which were mapped here so they can be unmapped at the end), then for each
// move invalidate the source range (non-coherent memory), memcpy, rewrite
// corruption sentinels, and flush the destination range.
12157 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12158 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12159 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12161 const size_t blockCount = m_Blocks.size();
12162 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12166 BLOCK_FLAG_USED = 0x00000001,
12167 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12175 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12176 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12177 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Pass 1: flag every block that participates in a move.
12180 const size_t moveCount = moves.size();
12181 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12183 const VmaDefragmentationMove& move = moves[moveIndex];
12184 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12185 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12188 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: ensure each used block is mapped; reuse an existing persistent
// mapping when available, otherwise map here and flag for later unmap.
12191 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12193 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12194 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12195 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12197 currBlockInfo.pMappedData = pBlock->GetMappedData();
12199 if(currBlockInfo.pMappedData == VMA_NULL)
12201 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12202 if(pDefragCtx->res == VK_SUCCESS)
12204 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Pass 3: perform the copies.
12211 if(pDefragCtx->res == VK_SUCCESS)
12213 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12214 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12216 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12218 const VmaDefragmentationMove& move = moves[moveIndex];
12220 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12221 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12223 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Non-coherent memory: invalidate the (atom-aligned) source range before
// reading it through the mapping.
12228 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12229 memRange.memory = pSrcBlock->GetDeviceMemory();
12230 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12231 memRange.size = VMA_MIN(
12232 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12233 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12234 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12239 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12240 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12241 static_cast<size_t>(move.size));
// Re-establish the corruption sentinels around the moved allocation.
12243 if(IsCorruptionDetectionEnabled())
12245 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12246 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Non-coherent memory: flush the (atom-aligned) destination range.
12252 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12253 memRange.memory = pDstBlock->GetDeviceMemory();
12254 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12255 memRange.size = VMA_MIN(
12256 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12257 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12258 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Pass 4 (reverse order): unmap only the blocks this function mapped.
12265 for(
size_t blockIndex = blockCount; blockIndex--; )
12267 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12268 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12270 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12271 pBlock->Unmap(m_hAllocator, 1);
// Records defragmentation moves into a command buffer as buffer-to-buffer
// copies: a temporary VkBuffer is created and bound over each participating
// block's whole memory, then vkCmdCopyBuffer is recorded per move. If any
// copy was recorded the context result becomes VK_NOT_READY (work pending on
// the GPU). Temporary buffers are cleaned up later by the context.
12276 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12277 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12278 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12279 VkCommandBuffer commandBuffer)
12281 const size_t blockCount = m_Blocks.size();
12283 pDefragCtx->blockContexts.resize(blockCount);
12284 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Pass 1: flag every block that participates in a move.
12287 const size_t moveCount = moves.size();
12288 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12290 const VmaDefragmentationMove& move = moves[moveIndex];
12291 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12292 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12295 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: create and bind a transfer buffer covering each used block.
12299 VkBufferCreateInfo bufCreateInfo;
12300 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
12302 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12304 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12305 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12306 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12308 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12309 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12310 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12311 if(pDefragCtx->res == VK_SUCCESS)
12313 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12314 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Pass 3: record one vkCmdCopyBuffer region per move.
12321 if(pDefragCtx->res == VK_SUCCESS)
12323 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12325 const VmaDefragmentationMove& move = moves[moveIndex];
12327 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12328 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12330 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12332 VkBufferCopy region = {
12336 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12337 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// Copies were recorded but not yet executed -- signal pending GPU work.
12342 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12344 pDefragCtx->res = VK_NOT_READY;
// Body fragment: scans blocks backwards, destroying empty blocks above
// m_MinBlockCount (crediting their size to pDefragmentationStats) and
// re-flagging m_HasEmptyBlock for any empty block that is kept.
// NOTE(review): the enclosing function signature (presumably the
// post-defragmentation empty-block cleanup of VmaBlockVector) was lost in
// extraction of this chunk -- confirm against the full source.
12350 m_HasEmptyBlock =
false;
12351 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12353 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12354 if(pBlock->m_pMetadata->IsEmpty())
// Above the minimum block count: destroy the empty block outright.
12356 if(m_Blocks.size() > m_MinBlockCount)
12358 if(pDefragmentationStats != VMA_NULL)
12361 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12364 VmaVectorRemove(m_Blocks, blockIndex);
12365 pBlock->Destroy(m_hAllocator);
12366 vma_delete(m_hAllocator, pBlock);
// At or below the minimum: keep the empty block and remember it exists.
12370 m_HasEmptyBlock =
true;
// Writes this block vector's JSON description (configuration fields differ
// between custom pools and default vectors, then a per-block detailed map).
// Compiled only when VMA_STATS_STRING_ENABLED. Held under a read lock.
// NOTE(review): the m_IsCustomPool branch header appears to have been lost
// in extraction -- the first field group is the custom-pool form, the
// PreferredBlockSize group the default form; confirm against full source.
12376 #if VMA_STATS_STRING_ENABLED 12378 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12380 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12382 json.BeginObject();
12386 json.WriteString(
"MemoryTypeIndex");
12387 json.WriteNumber(m_MemoryTypeIndex);
12389 json.WriteString(
"BlockSize");
12390 json.WriteNumber(m_PreferredBlockSize);
12392 json.WriteString(
"BlockCount");
12393 json.BeginObject(
true);
12394 if(m_MinBlockCount > 0)
12396 json.WriteString(
"Min");
12397 json.WriteNumber((uint64_t)m_MinBlockCount);
12399 if(m_MaxBlockCount < SIZE_MAX)
12401 json.WriteString(
"Max");
12402 json.WriteNumber((uint64_t)m_MaxBlockCount);
12404 json.WriteString(
"Cur");
12405 json.WriteNumber((uint64_t)m_Blocks.size());
12408 if(m_FrameInUseCount > 0)
12410 json.WriteString(
"FrameInUseCount");
12411 json.WriteNumber(m_FrameInUseCount);
12414 if(m_Algorithm != 0)
12416 json.WriteString(
"Algorithm");
12417 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
12422 json.WriteString(
"PreferredBlockSize");
12423 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, keyed by block id.
12426 json.WriteString(
"Blocks");
12427 json.BeginObject();
12428 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12430 json.BeginString();
12431 json.ContinueString(m_Blocks[i]->GetId());
12434 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this block vector.
// Decides between CPU (host-visible memcpy) and GPU (vkCmdCopyBuffer via
// commandBuffer) defragmentation, computes the planned moves with the
// context's algorithm, decrements the caller's remaining byte/allocation
// budgets (passed by reference), and applies the moves.
// The write mutex, if taken, is left locked; DefragmentationEnd() releases it.
// NOTE(review): extract omits some original lines (gaps in embedded numbers).
12441 #endif // #if VMA_STATS_STRING_ENABLED 12443 void VmaBlockVector::Defragment(
12444 class VmaBlockVectorDefragmentationContext* pCtx,
12446 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12447 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12448 VkCommandBuffer commandBuffer)
12450 pCtx->res = VK_SUCCESS;
12452 const VkMemoryPropertyFlags memPropFlags =
12453 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12454 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
// CPU path needs budget left (plus conditions on a line missing from this
// extract — presumably host visibility; TODO confirm).
12456 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
// GPU path needs budget, no corruption-detection margins, and this memory
// type enabled in the GPU-defragmentation mask.
12458 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12459 !IsCorruptionDetectionEnabled() &&
12460 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
12463 if(canDefragmentOnCpu || canDefragmentOnGpu)
12465 bool defragmentOnGpu;
// Exactly one path possible: take it. Both possible: prefer GPU for
// device-local memory (or on integrated GPUs where memory is shared).
12467 if(canDefragmentOnGpu != canDefragmentOnCpu)
12469 defragmentOnGpu = canDefragmentOnGpu;
12474 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12475 m_hAllocator->IsIntegratedGpu();
// GPU copies of overlapping ranges are not safe with vkCmdCopyBuffer,
// so overlapping moves are allowed only on the CPU path.
12478 bool overlappingMoveSupported = !defragmentOnGpu;
// Deliberately NOT scoped/RAII: the lock is released later in
// DefragmentationEnd(), tracked via pCtx->mutexLocked.
12480 if(m_hAllocator->m_UseMutex)
12482 m_Mutex.LockWrite();
12483 pCtx->mutexLocked =
true;
12486 pCtx->Begin(overlappingMoveSupported);
12490 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12491 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12492 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12493 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()))
12494 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Report actual work done and charge it against the caller's budget
// for whichever path (CPU/GPU) was used.
12497 if(pStats != VMA_NULL)
12499 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12500 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12503 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12504 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12505 if(defragmentOnGpu)
12507 maxGpuBytesToMove -= bytesMoved;
12508 maxGpuAllocationsToMove -= allocationsMoved;
12512 maxCpuBytesToMove -= bytesMoved;
12513 maxCpuAllocationsToMove -= allocationsMoved;
// Apply the planned moves; >= VK_SUCCESS admits VK_INCOMPLETE-style
// positive results as well as VK_SUCCESS.
12517 if(pCtx->res >= VK_SUCCESS)
12519 if(defragmentOnGpu)
12521 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12525 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finalizes a defragmentation pass started by Defragment():
// destroys the temporary VkBuffers created per block for GPU copies,
// frees now-empty blocks on success, and releases the write lock that
// Defragment() left held (pCtx->mutexLocked).
12531 void VmaBlockVector::DefragmentationEnd(
12532 class VmaBlockVectorDefragmentationContext* pCtx,
// Iterate in reverse; these buffers were bound to the blocks' memory
// solely for vkCmdCopyBuffer during GPU defragmentation.
12536 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12538 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12539 if(blockCtx.hBuffer)
12541 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12542 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12546 if(pCtx->res >= VK_SUCCESS)
12548 FreeEmptyBlocks(pStats);
// Release the lock taken in Defragment(); must only be set when the
// allocator actually uses a mutex.
12551 if(pCtx->mutexLocked)
12553 VMA_ASSERT(m_hAllocator->m_UseMutex);
12554 m_Mutex.UnlockWrite();
// Returns the total number of live allocations across all blocks,
// summed from each block's metadata.
// NOTE(review): the accumulator declaration and return statement are on
// lines missing from this extract.
12558 size_t VmaBlockVector::CalcAllocationCount()
const 12561 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12563 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Returns whether any block could contain adjacent buffer/image
// suballocations closer than bufferImageGranularity (a Vulkan aliasing
// constraint). Trivially impossible when the granularity is 1.
// Only valid for the default (generic) algorithm — asserted below.
12568 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12570 if(m_BufferImageGranularity == 1)
// Carried across blocks so a conflict spanning a block boundary check
// uses the previous block's last suballocation type.
12574 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12575 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12577 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
// Downcast below is only safe for algorithm 0 (generic metadata).
12578 VMA_ASSERT(m_Algorithm == 0);
12579 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12580 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks eligible allocations in every block as "lost" for the given frame
// (part of VMA's lost-allocation feature). Optionally reports how many
// allocations were lost via pLostAllocationCount.
12588 void VmaBlockVector::MakePoolAllocationsLost(
12589 uint32_t currentFrameIndex,
12590 size_t* pLostAllocationCount)
// Exclusive lock: block metadata is mutated.
12592 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12593 size_t lostAllocationCount = 0;
12594 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12596 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12597 VMA_ASSERT(pBlock);
12598 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Out-parameter is optional.
12600 if(pLostAllocationCount != VMA_NULL)
12602 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection margins in every block.
// Returns VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not
// enabled for this vector; otherwise propagates the first per-block failure
// (the early-return on failure is on a line missing from this extract).
12606 VkResult VmaBlockVector::CheckCorruption()
12608 if(!IsCorruptionDetectionEnabled())
12610 return VK_ERROR_FEATURE_NOT_PRESENT;
// Read lock is sufficient: blocks are inspected, not modified.
12613 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12614 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12616 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12617 VMA_ASSERT(pBlock);
12618 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12619 if(res != VK_SUCCESS)
// Accumulates this vector's per-block statistics into pStats, updating the
// global total plus the matching per-memory-type and per-heap buckets.
12627 void VmaBlockVector::AddStats(
VmaStats* pStats)
12629 const uint32_t memTypeIndex = m_MemoryTypeIndex;
// Map the memory type to its heap so heap-level stats stay in sync.
12630 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12632 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12634 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12636 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12637 VMA_ASSERT(pBlock);
// Expensive full validation, compiled in only for heavy-assert builds.
12638 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo's declaration is on a line missing from this extract.
12640 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
// Same per-block info is folded into all three aggregation levels.
12641 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12642 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12643 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: snapshots one BlockInfo per block of the target vector
// (remembering each block's original index), then sorts the infos by block
// pointer so AddAllocation() can binary-search them.
12650 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12652 VmaBlockVector* pBlockVector,
12653 uint32_t currentFrameIndex,
12654 bool overlappingMoveSupported) :
12655 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12656 m_AllocationCount(0),
12657 m_AllAllocations(false),
12659 m_AllocationsMoved(0),
// Container uses the allocator's VkAllocationCallbacks, like all VMA
// internal containers.
12660 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12663 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12664 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
// BlockInfos are heap-allocated via vma_new; freed in the destructor.
12666 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12667 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12668 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12669 m_Blocks.push_back(pBlockInfo);
// Sorted by block pointer — precondition for VmaBinaryFindFirstNotLess
// in AddAllocation().
12673 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: frees the BlockInfo objects allocated in the constructor
// (reverse order, via the allocator's vma_delete).
12676 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12678 for(
size_t i = m_Blocks.size(); i--; )
12680 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers a single allocation as a defragmentation candidate.
// Lost allocations are skipped. The owning block is located by binary
// search over m_Blocks (sorted by block pointer in the constructor) and the
// allocation is appended to that block's candidate list.
// pChanged, if non-null, will later be set to VK_TRUE if the allocation moves.
12684 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12687 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12689 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12690 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12691 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12693 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12694 (*it)->m_Allocations.push_back(allocInfo);
// Counted regardless of branch visible above — used by callers to decide
// whether "all allocations" were registered.
12701 ++m_AllocationCount;
// One round of the generic algorithm: walks source allocations from the
// last block backwards and tries to re-place each into an earlier block
// (or earlier offset) via CreateAllocationRequest, recording each
// successful relocation into `moves` and updating the block metadata,
// until the byte/allocation budget is exhausted.
// NOTE(review): several control-flow lines (loop heads, returns, braces)
// are missing from this extract — the annotated structure below is partial.
12705 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12706 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12707 VkDeviceSize maxBytesToMove,
12708 uint32_t maxAllocationsToMove)
12710 if(m_Blocks.empty())
12723 size_t srcBlockMinIndex = 0;
// Scan sources from the last block / last allocation backwards.
12736 size_t srcBlockIndex = m_Blocks.size() - 1;
12737 size_t srcAllocIndex = SIZE_MAX;
// SIZE_MAX sentinel forces this loop to (re)position srcAllocIndex at the
// end of the current source block's allocation list.
12743 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12745 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// Reached the lowest block allowed as a source: nothing left to move.
12748 if(srcBlockIndex == srcBlockMinIndex)
12755 srcAllocIndex = SIZE_MAX;
12760 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12764 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12765 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12767 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12768 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12769 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12770 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to and including the source
// block itself (same-block moves shift an allocation toward offset 0).
12773 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12775 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12776 VmaAllocationRequest dstAllocRequest;
12777 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12778 m_CurrentFrameIndex,
12779 m_pBlockVector->GetFrameInUseCount(),
12780 m_pBlockVector->GetBufferImageGranularity(),
12787 &dstAllocRequest) &&
// MoveMakesSense() rejects moves to a later block/offset.
12789 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12791 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: stop the round before exceeding either limit.
12794 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12795 (m_BytesMoved + size > maxBytesToMove))
12800 VmaDefragmentationMove move;
// Moves are recorded in ORIGINAL block indices, since m_Blocks is sorted
// differently from the owning VmaBlockVector.
12801 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12802 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12803 move.srcOffset = srcOffset;
12804 move.dstOffset = dstAllocRequest.offset;
12806 moves.push_back(move);
// Commit the move in metadata: allocate at destination, free at source,
// repoint the allocation handle.
12808 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12812 allocInfo.m_hAllocation);
12813 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12815 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
// Report the move to the caller, if requested via AddAllocation().
12817 if(allocInfo.m_pChanged != VMA_NULL)
12819 *allocInfo.m_pChanged = VK_TRUE;
12822 ++m_AllocationsMoved;
12823 m_BytesMoved += size;
12825 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous allocation / previous block as sources.
12833 if(srcAllocIndex > 0)
12839 if(srcBlockIndex > 0)
12842 srcAllocIndex = SIZE_MAX;
// Counts blocks flagged as containing at least one non-movable allocation.
// NOTE(review): the counter declaration/increment/return are on lines
// missing from this extract.
12852 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12855 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12857 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: prepares per-block candidate lists
// (optionally harvesting every used suballocation when m_AllAllocations is
// set), sorts blocks into destination-preference order, then runs up to
// `roundCount` DefragmentRound() passes within the given budgets.
12865 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12866 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12867 VkDeviceSize maxBytesToMove,
12868 uint32_t maxAllocationsToMove)
// Nothing registered and not in "move everything" mode: nothing to do.
12870 if(!m_AllAllocations && m_AllocationCount == 0)
12875 const size_t blockCount = m_Blocks.size();
12876 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12878 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
12880 if(m_AllAllocations)
// "All allocations" mode: enumerate every non-free suballocation straight
// from the generic metadata instead of relying on AddAllocation() calls.
12882 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12883 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12884 it != pMetadata->m_Suballocations.end();
12887 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12889 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12890 pBlockInfo->m_Allocations.push_back(allocInfo);
12895 pBlockInfo->CalcHasNonMovableAllocations();
// Sources are consumed from the back of each list, so sort candidates by
// descending offset: highest offsets move first.
12899 pBlockInfo->SortAllocationsByOffsetDescending();
// Reorder blocks so preferred destinations come first.
12905 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
12908 const uint32_t roundCount = 2;
12911 VkResult result = VK_SUCCESS;
12912 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12914 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
// Heuristic: a move is worthwhile only toward an earlier block, or within
// the same block toward a lower offset. (The returns for each comparison
// are on lines missing from this extract.)
12920 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12921 size_t dstBlockIndex, VkDeviceSize dstOffset,
12922 size_t srcBlockIndex, VkDeviceSize srcOffset)
12924 if(dstBlockIndex < srcBlockIndex)
12928 if(dstBlockIndex > srcBlockIndex)
12932 if(dstOffset < srcOffset)
// Constructor of the fast (compacting) algorithm. Only initializes state;
// the asserted precondition is that debug margins are disabled, since this
// algorithm rewrites suballocation lists assuming tightly packed layout.
12942 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12944 VmaBlockVector* pBlockVector,
12945 uint32_t currentFrameIndex,
12946 bool overlappingMoveSupported) :
12947 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12948 m_OverlappingMoveSupported(overlappingMoveSupported),
12949 m_AllocationCount(0),
12950 m_AllAllocations(false),
12952 m_AllocationsMoved(0),
12953 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
// Fast algorithm is incompatible with VMA_DEBUG_MARGIN padding.
12955 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Empty destructor — m_BlockInfos cleans itself up.
12959 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast compaction: treats all blocks as one logical stream and re-packs
// every suballocation front-to-back, filling earlier blocks first.
// Blocks are processed in ascending free-size order; gaps that get skipped
// are remembered in a FreeSpaceDatabase and reused for later allocations.
// Metadata is rewritten wholesale via Preprocess/PostprocessMetadata.
// Requires that ALL allocations were registered (asserted below).
// NOTE(review): many structural lines (braces, else, end flag handling)
// are missing from this extract; annotations below follow the visible code.
12963 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12964 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12965 VkDeviceSize maxBytesToMove,
12966 uint32_t maxAllocationsToMove)
12968 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12970 const size_t blockCount = m_pBlockVector->GetBlockCount();
12971 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Strips FREE suballocations out of every block's metadata so the lists
// below contain only live allocations.
12976 PreprocessMetadata();
12980 m_BlockInfos.resize(blockCount);
12981 for(
size_t i = 0; i < blockCount; ++i)
12983 m_BlockInfos[i].origBlockIndex = i;
// Process blocks in ascending free-size order (fullest blocks first).
12986 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12987 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12988 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Remembers gaps left behind when advancing past a block's end, so later
// (smaller) allocations can still fill them.
12993 FreeSpaceDatabase freeSpaceDb;
// Write cursor: current destination block and offset within it.
12995 size_t dstBlockInfoIndex = 0;
12996 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12997 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12998 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12999 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
13000 VkDeviceSize dstOffset = 0;
13003 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
13005 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
13006 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
13007 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
// Walk live suballocations of the source block in offset order.
13008 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
13009 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
13011 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
13012 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
13013 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget exhausted: stop the whole pass.
13014 if(m_AllocationsMoved == maxAllocationsToMove ||
13015 m_BytesMoved + srcAllocSize > maxBytesToMove)
13020 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// First try to place this allocation into a previously-recorded gap.
13023 size_t freeSpaceInfoIndex;
13024 VkDeviceSize dstAllocOffset;
13025 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
13026 freeSpaceInfoIndex, dstAllocOffset))
13028 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
13029 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
13030 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Case A: the gap is in the SAME block — only the offset changes.
13033 if(freeSpaceInfoIndex == srcBlockInfoIndex)
13035 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13039 VmaSuballocation suballoc = *srcSuballocIt;
13040 suballoc.offset = dstAllocOffset;
13041 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
13042 m_BytesMoved += srcAllocSize;
13043 ++m_AllocationsMoved;
// Re-insert at the new position: erase from the list and re-add sorted.
13045 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13047 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13048 srcSuballocIt = nextSuballocIt;
13050 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13052 VmaDefragmentationMove move = {
13053 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13054 srcAllocOffset, dstAllocOffset,
13056 moves.push_back(move);
// Case B: the gap is in an EARLIER block — reparent the allocation.
13063 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13065 VmaSuballocation suballoc = *srcSuballocIt;
13066 suballoc.offset = dstAllocOffset;
13067 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13068 m_BytesMoved += srcAllocSize;
13069 ++m_AllocationsMoved;
13071 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13073 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13074 srcSuballocIt = nextSuballocIt;
13076 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13078 VmaDefragmentationMove move = {
13079 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13080 srcAllocOffset, dstAllocOffset,
13082 moves.push_back(move);
// No recorded gap fits: place at the write cursor, aligned.
13087 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Doesn't fit in the remaining space of the current destination block:
// record the tail as a gap and advance the cursor to the next block.
13090 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13091 dstAllocOffset + srcAllocSize > dstBlockSize)
13094 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13096 ++dstBlockInfoIndex;
13097 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13098 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13099 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13100 dstBlockSize = pDstMetadata->GetSize();
13102 dstAllocOffset = 0;
// Destination caught up with the source block: same-block move.
13106 if(dstBlockInfoIndex == srcBlockInfoIndex)
13108 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13110 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
13112 bool skipOver = overlap;
13113 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: skip the overlapping move when the shift is tiny relative
// to the allocation size (moved distance * 64 < size) — not worth it.
13117 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skipping: leave the allocation where it is, record the gap before it,
// and continue packing after it.
13122 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13124 dstOffset = srcAllocOffset + srcAllocSize;
// Moving within the same block: just rewrite the offset in place.
13130 srcSuballocIt->offset = dstAllocOffset;
13131 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13132 dstOffset = dstAllocOffset + srcAllocSize;
13133 m_BytesMoved += srcAllocSize;
13134 ++m_AllocationsMoved;
13136 VmaDefragmentationMove move = {
13137 srcOrigBlockIndex, dstOrigBlockIndex,
13138 srcAllocOffset, dstAllocOffset,
13140 moves.push_back(move);
// Destination is an earlier block: move the suballocation across blocks.
13148 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13149 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13151 VmaSuballocation suballoc = *srcSuballocIt;
13152 suballoc.offset = dstAllocOffset;
13153 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13154 dstOffset = dstAllocOffset + srcAllocSize;
13155 m_BytesMoved += srcAllocSize;
13156 ++m_AllocationsMoved;
13158 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13160 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13161 srcSuballocIt = nextSuballocIt;
// Destination list stays sorted because we always append at the cursor.
13163 pDstMetadata->m_Suballocations.push_back(suballoc);
13165 VmaDefragmentationMove move = {
13166 srcOrigBlockIndex, dstOrigBlockIndex,
13167 srcAllocOffset, dstAllocOffset,
13169 moves.push_back(move);
13175 m_BlockInfos.clear();
// Rebuilds FREE suballocations, free counts and by-size registries that
// PreprocessMetadata() stripped.
13177 PostprocessMetadata();
// Strips every FREE suballocation from every block's generic metadata and
// resets free-space bookkeeping (free count, sum of free size, by-size
// registry), leaving only live allocations in the lists. The intermediate
// state is NOT valid metadata — PostprocessMetadata() must restore it.
13182 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13184 const size_t blockCount = m_pBlockVector->GetBlockCount();
13185 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13187 VmaBlockMetadata_Generic*
const pMetadata =
13188 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
// Temporarily pretend the whole block is free; corrected in postprocess.
13189 pMetadata->m_FreeCount = 0;
13190 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13191 pMetadata->m_FreeSuballocationsBySize.clear();
13192 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13193 it != pMetadata->m_Suballocations.end(); )
13195 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
// Save-next-then-erase idiom: erase invalidates `it`.
13197 VmaSuballocationList::iterator nextIt = it;
13199 pMetadata->m_Suballocations.erase(it);
// Rebuilds valid generic metadata after the fast pass: re-inserts FREE
// suballocations into every gap between (and after) the re-packed live
// allocations, restores m_FreeCount / m_SumFreeSize, re-registers large
// free ranges in the by-size list, then re-sorts that list and validates.
13210 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13212 const size_t blockCount = m_pBlockVector->GetBlockCount();
13213 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13215 VmaBlockMetadata_Generic*
const pMetadata =
13216 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13217 const VkDeviceSize blockSize = pMetadata->GetSize();
// Block emptied by the pass: one block-sized FREE suballocation.
13220 if(pMetadata->m_Suballocations.empty())
13222 pMetadata->m_FreeCount = 1;
13224 VmaSuballocation suballoc = {
13228 VMA_SUBALLOCATION_TYPE_FREE };
13229 pMetadata->m_Suballocations.push_back(suballoc);
13230 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Non-empty block: walk live suballocations in offset order and insert a
// FREE entry before each gap.
13235 VkDeviceSize offset = 0;
13236 VmaSuballocationList::iterator it;
13237 for(it = pMetadata->m_Suballocations.begin();
13238 it != pMetadata->m_Suballocations.end();
// Preprocess removed all FREE entries, and offsets must be monotonic.
13241 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13242 VMA_ASSERT(it->offset >= offset);
13245 if(it->offset > offset)
13247 ++pMetadata->m_FreeCount;
13248 const VkDeviceSize freeSize = it->offset - offset;
13249 VmaSuballocation suballoc = {
13253 VMA_SUBALLOCATION_TYPE_FREE };
13254 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// Only free ranges above the registration threshold go in the by-size
// list (matches the generic metadata's policy).
13255 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13257 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13261 pMetadata->m_SumFreeSize -= it->size;
13262 offset = it->offset + it->size;
// Trailing gap after the last live suballocation.
13266 if(offset < blockSize)
13268 ++pMetadata->m_FreeCount;
13269 const VkDeviceSize freeSize = blockSize - offset;
13270 VmaSuballocation suballoc = {
13274 VMA_SUBALLOCATION_TYPE_FREE };
13275 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13276 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): uses `>` here vs `>=` for the preceding-gap case above —
// looks inconsistent with the threshold policy; confirm against upstream.
13277 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13279 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
// Entries were appended unsorted; restore the by-size ordering invariant.
13284 pMetadata->m_FreeSuballocationsBySize.begin(),
13285 pMetadata->m_FreeSuballocationsBySize.end(),
13286 VmaSuballocationItemSizeLess());
13289 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts `suballoc` into the metadata's suballocation list at the position
// keeping the list sorted by offset (linear scan from the front).
13293 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13296 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13297 while(it != pMetadata->m_Suballocations.end())
// Advance while existing offsets are smaller (loop-break logic is on
// lines missing from this extract).
13299 if(it->offset < suballoc.offset)
13304 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context. Pure member initialization;
// the concrete algorithm (m_pAlgorithm) is created lazily in Begin().
// m_hCustomPool is null for default-pool vectors.
13310 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13313 VmaBlockVector* pBlockVector,
13314 uint32_t currFrameIndex) :
13316 mutexLocked(false),
13317 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13318 m_hAllocator(hAllocator),
13319 m_hCustomPool(hCustomPool),
13320 m_pBlockVector(pBlockVector),
13321 m_CurrFrameIndex(currFrameIndex),
13322 m_pAlgorithm(VMA_NULL),
13323 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13324 m_AllAllocations(false)
// Destroys the lazily-created algorithm object (vma_delete is null-safe
// per its use pattern here — m_pAlgorithm may still be VMA_NULL).
13328 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13330 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues one allocation (with its optional "changed" output flag) to be
// handed to the algorithm when Begin() runs.
13333 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13335 AllocInfo info = { hAlloc, pChanged };
13336 m_Allocations.push_back(info);
// Instantiates the defragmentation algorithm and feeds it the registered
// allocations. Chooses the Fast algorithm when all allocations of the
// vector are in play, debug margins are off, and no buffer/image
// granularity conflicts are possible; otherwise falls back to Generic.
13339 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
// "All" either requested explicitly or implied by having registered every
// allocation the vector owns.
13341 const bool allAllocations = m_AllAllocations ||
13342 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
// Fast path preconditions (additional conditions are on lines missing
// from this extract — presumably allAllocations; TODO confirm).
13354 if(VMA_DEBUG_MARGIN == 0 &&
13356 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13358 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13359 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13363 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13364 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13369 m_pAlgorithm->AddAll();
// Otherwise hand over the explicit allocation list.
13373 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13375 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context spanning default pools and custom
// pools. Default-pool sub-contexts live in a fixed array indexed by memory
// type (zeroed here); custom-pool sub-contexts go in m_CustomPoolContexts.
13383 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13385 uint32_t currFrameIndex,
13388 m_hAllocator(hAllocator),
13389 m_CurrFrameIndex(currFrameIndex),
13392 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// All per-memory-type slots start empty; created on demand in
// AddAllocations().
13394 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Destructor doubles as the "end defragmentation" step: for every
// sub-context (custom pools first, then default pools), calls
// DefragmentationEnd() on its block vector — which frees temporary
// buffers, releases held locks, frees empty blocks — then deletes it.
13397 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13399 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13401 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13402 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13403 vma_delete(m_hAllocator, pBlockVectorCtx);
// Default-pool slots are sparse: only non-null entries were created.
13405 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13407 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13408 if(pBlockVectorCtx)
13410 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13411 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Pools using a
// non-default algorithm (linear/buddy) are skipped. A sub-context is
// created per pool unless one already exists, and is put in
// "all allocations" mode via AddAll().
13416 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13418 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13420 VmaPool pool = pPools[poolIndex];
// Only pools with the default (generic) algorithm are defragmentable.
13423 if(pool->m_BlockVector.GetAlgorithm() == 0)
13425 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Linear search for an existing context for this pool (count is small).
13427 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13429 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13431 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13436 if(!pBlockVectorDefragCtx)
13438 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13441 &pool->m_BlockVector,
13443 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Whole-pool registration: defragment everything in the pool.
13446 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Only block-type,
// non-lost allocations qualify. Each allocation is routed to the
// sub-context of its owning custom pool or, failing that, of the default
// block vector for its memory type — creating the sub-context on demand.
// pAllocationsChanged[i], if provided, will receive VK_TRUE when
// allocation i is actually moved.
13451 void VmaDefragmentationContext_T::AddAllocations(
13452 uint32_t allocationCount,
13454 VkBool32* pAllocationsChanged)
13457 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
// hAlloc comes from a line missing in this extract (presumably
// pAllocations[allocIndex]).
13460 VMA_ASSERT(hAlloc);
// Dedicated allocations cannot be defragmented; lost ones are skipped.
13462 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13464 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13466 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13468 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Custom-pool allocation.
13470 if(hAllocPool != VK_NULL_HANDLE)
// Pools with non-default algorithms are not defragmentable.
13473 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13475 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13477 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13479 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13483 if(!pBlockVectorDefragCtx)
13485 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13488 &hAllocPool->m_BlockVector,
13490 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Default-pool allocation: context slot indexed by memory type.
13497 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13498 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13499 if(!pBlockVectorDefragCtx)
13501 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13504 m_hAllocator->m_pBlockVectors[memTypeIndex],
13506 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
// May still be null (e.g. non-default-algorithm custom pool): then the
// allocation is simply not registered.
13510 if(pBlockVectorDefragCtx)
13512 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13513 &pAllocationsChanged[allocIndex] : VMA_NULL;
13514 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Executes the defragmentation pass over all registered block-vector contexts,
// first the per-memory-type default contexts, then the custom-pool contexts.
// With no command buffer, GPU-side move budgets are zeroed (CPU-only pass).
// The first failing context's result is propagated; the loops stop early when
// res drops below VK_SUCCESS.
13520 VkResult VmaDefragmentationContext_T::Defragment(
13521 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13522 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// No command buffer => GPU defragmentation is disabled for this pass.
13530 if(commandBuffer == VK_NULL_HANDLE)
13532 maxGpuBytesToMove = 0;
13533 maxGpuAllocationsToMove = 0;
13536 VkResult res = VK_SUCCESS;
// Default pools: one context per memory type, possibly null.
13539 for(uint32_t memTypeIndex = 0;
13540 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13543 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13544 if(pBlockVectorCtx)
13546 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13547 pBlockVectorCtx->GetBlockVector()->Defragment(
13550 maxCpuBytesToMove, maxCpuAllocationsToMove,
13551 maxGpuBytesToMove, maxGpuAllocationsToMove,
13553 if(pBlockVectorCtx->res != VK_SUCCESS)
13555 res = pBlockVectorCtx->res;
// Custom pools: contexts were added in AddPools/AddAllocations and are non-null.
13561 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13562 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13565 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13566 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13567 pBlockVectorCtx->GetBlockVector()->Defragment(
13570 maxCpuBytesToMove, maxCpuAllocationsToMove,
13571 maxGpuBytesToMove, maxGpuAllocationsToMove,
13573 if(pBlockVectorCtx->res != VK_SUCCESS)
13575 res = pBlockVectorCtx->res;
// VmaRecorder: records allocator calls to a CSV file (Windows-only; uses
// QueryPerformanceCounter and fopen_s). The block below contains the default
// constructor, the Init body (its signature — presumably taking a
// VmaRecordSettings and a useMutex flag — was lost in extraction; TODO confirm),
// and the destructor.
13585 #if VMA_RECORDING_ENABLED 13587 VmaRecorder::VmaRecorder() :
13592 m_StartCounter(INT64_MAX)
13598 m_UseMutex = useMutex;
13599 m_Flags = settings.
flags;
// Capture the QPC frequency and a start timestamp so later records can log
// elapsed seconds relative to Init.
13601 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13602 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
// Open the target recording file for binary write.
13605 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13608 return VK_ERROR_INITIALIZATION_FAILED;
// CSV header: file type line and format version line.
13612 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13613 fprintf(m_File,
"%s\n",
"1,5");
// Destructor closes the recording file if one was opened (fclose call lost in
// extraction — TODO confirm).
13618 VmaRecorder::~VmaRecorder()
13620 if(m_File != VMA_NULL)
// Appends one CSV line per recorded call: "threadId,time,frameIndex,functionName".
// The file mutex is taken only when m_UseMutex is set.
13626 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13628 CallParams callParams;
13629 GetBasicParams(callParams);
13631 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13632 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Same CSV shape for allocator destruction.
13636 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13638 CallParams callParams;
13639 GetBasicParams(callParams);
13641 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13642 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// RecordCreatePool body (its signature line was lost in extraction — TODO
// confirm it took frameIndex, a VmaPoolCreateInfo and the resulting VmaPool).
// Logs the pool creation parameters followed by the pool handle (%p).
13648 CallParams callParams;
13649 GetBasicParams(callParams);
13651 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13652 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs destruction of a custom pool by handle.
13663 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13665 CallParams callParams;
13666 GetBasicParams(callParams);
13668 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13669 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaAllocateMemory call: memory requirements, allocation-create-info
// fields (several argument lines lost in extraction), the resulting handle,
// and the user-data string (pointer or text, per UserDataString).
13674 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13675 const VkMemoryRequirements& vkMemReq,
13679 CallParams callParams;
13680 GetBasicParams(callParams);
13682 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13683 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13684 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13686 vkMemReq.alignment,
13687 vkMemReq.memoryTypeBits,
13695 userDataStr.GetString());
// Multi-page variant: the allocation handles are emitted as a space-separated
// pointer list via PrintPointerList, then the user-data string.
13699 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13700 const VkMemoryRequirements& vkMemReq,
13702 uint64_t allocationCount,
13705 CallParams callParams;
13706 GetBasicParams(callParams);
13708 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13709 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13710 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13712 vkMemReq.alignment,
13713 vkMemReq.memoryTypeBits,
13720 PrintPointerList(allocationCount, pAllocations);
13721 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Logs vmaAllocateMemoryForBuffer: requirements, the dedicated-allocation
// hints as 0/1 flags, create-info fields, handle and user-data string.
13725 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13726 const VkMemoryRequirements& vkMemReq,
13727 bool requiresDedicatedAllocation,
13728 bool prefersDedicatedAllocation,
13732 CallParams callParams;
13733 GetBasicParams(callParams);
13735 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13736 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13737 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13739 vkMemReq.alignment,
13740 vkMemReq.memoryTypeBits,
13741 requiresDedicatedAllocation ? 1 : 0,
13742 prefersDedicatedAllocation ? 1 : 0,
13750 userDataStr.GetString());
// Image variant — identical structure, different call-name tag in the CSV.
13754 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13755 const VkMemoryRequirements& vkMemReq,
13756 bool requiresDedicatedAllocation,
13757 bool prefersDedicatedAllocation,
13761 CallParams callParams;
13762 GetBasicParams(callParams);
13764 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13765 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13766 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13768 vkMemReq.alignment,
13769 vkMemReq.memoryTypeBits,
13770 requiresDedicatedAllocation ? 1 : 0,
13771 prefersDedicatedAllocation ? 1 : 0,
13779 userDataStr.GetString());
// Logs freeing of a single allocation by handle.
13783 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13786 CallParams callParams;
13787 GetBasicParams(callParams);
13789 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13790 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs freeing of multiple allocations; handles are appended via
// PrintPointerList, then the line is terminated.
13795 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13796 uint64_t allocationCount,
13799 CallParams callParams;
13800 GetBasicParams(callParams);
13802 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13803 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13804 PrintPointerList(allocationCount, pAllocations);
13805 fprintf(m_File,
"\n");
// Logs a resize request: allocation handle and the new size in bytes.
13809 void VmaRecorder::RecordResizeAllocation(
13810 uint32_t frameIndex,
13812 VkDeviceSize newSize)
13814 CallParams callParams;
13815 GetBasicParams(callParams);
13817 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13818 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13819 allocation, newSize);
// Logs attaching user data to an allocation; UserDataString renders either the
// string contents or the raw pointer (constructor args lost in extraction).
13823 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13825 const void* pUserData)
13827 CallParams callParams;
13828 GetBasicParams(callParams);
13830 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13831 UserDataString userDataStr(
13834 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13836 userDataStr.GetString());
// Simple single-handle records: each logs one call name plus the allocation
// handle (and for flush/invalidate also offset and size).
13840 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13843 CallParams callParams;
13844 GetBasicParams(callParams);
13846 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13847 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13852 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13855 CallParams callParams;
13856 GetBasicParams(callParams);
13858 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13859 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13864 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13867 CallParams callParams;
13868 GetBasicParams(callParams);
13870 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13871 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Flush/invalidate also record the affected byte range (offset, size).
13876 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13877 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13879 CallParams callParams;
13880 GetBasicParams(callParams);
13882 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13883 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
13890 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13891 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13893 CallParams callParams;
13894 GetBasicParams(callParams);
13896 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13897 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Logs vmaCreateBuffer: buffer-create-info fields, allocation-create-info
// fields (flags/usage/pool; some lines lost in extraction), handles and
// user-data string.
13904 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13905 const VkBufferCreateInfo& bufCreateInfo,
13909 CallParams callParams;
13910 GetBasicParams(callParams);
13912 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13913 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13914 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13915 bufCreateInfo.flags,
13916 bufCreateInfo.size,
13917 bufCreateInfo.usage,
13918 bufCreateInfo.sharingMode,
13919 allocCreateInfo.
flags,
13920 allocCreateInfo.
usage,
13924 allocCreateInfo.
pool,
13926 userDataStr.GetString());
// Logs vmaCreateImage: the full VkImageCreateInfo (type, format, extent, mips,
// layers, samples, tiling, usage, sharing, layout) plus allocation info.
13930 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13931 const VkImageCreateInfo& imageCreateInfo,
13935 CallParams callParams;
13936 GetBasicParams(callParams);
13938 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13939 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13940 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13941 imageCreateInfo.flags,
13942 imageCreateInfo.imageType,
13943 imageCreateInfo.format,
13944 imageCreateInfo.extent.width,
13945 imageCreateInfo.extent.height,
13946 imageCreateInfo.extent.depth,
13947 imageCreateInfo.mipLevels,
13948 imageCreateInfo.arrayLayers,
13949 imageCreateInfo.samples,
13950 imageCreateInfo.tiling,
13951 imageCreateInfo.usage,
13952 imageCreateInfo.sharingMode,
13953 imageCreateInfo.initialLayout,
13954 allocCreateInfo.
flags,
13955 allocCreateInfo.
usage,
13959 allocCreateInfo.
pool,
13961 userDataStr.GetString());
// Single-handle records for destroy/touch/query calls; each logs the call
// name and one handle (argument lines lost in extraction).
13965 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13968 CallParams callParams;
13969 GetBasicParams(callParams);
13971 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13972 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
13977 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13980 CallParams callParams;
13981 GetBasicParams(callParams);
13983 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13984 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
13989 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13992 CallParams callParams;
13993 GetBasicParams(callParams);
13995 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13996 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
14001 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
14004 CallParams callParams;
14005 GetBasicParams(callParams);
14007 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14008 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
14013 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
14016 CallParams callParams;
14017 GetBasicParams(callParams);
14019 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14020 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs vmaDefragmentationBegin: flags, then (in visible lines) a list section
// and the numeric limits/handles; several argument lines lost in extraction.
14025 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
14029 CallParams callParams;
14030 GetBasicParams(callParams);
14032 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14033 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
14036 fprintf(m_File,
",");
14038 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Logs vmaDefragmentationEnd by context handle.
14048 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
14051 CallParams callParams;
14052 GetBasicParams(callParams);
14054 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14055 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of the UserDataString constructor (signature lost in extraction —
// presumably takes allocation flags and pUserData; TODO confirm): when user
// data is a string it is referenced directly, otherwise the pointer value is
// formatted into m_PtrStr.
14062 if(pUserData != VMA_NULL)
14066 m_Str = (
const char*)pUserData;
// Non-string user data: render the raw pointer as text (Windows sprintf_s).
14070 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes a "Config,Begin"…"Config,End" section to the recording file: physical
// device identity and limits, per-heap and per-type memory properties, whether
// VK_KHR_dedicated_allocation is enabled, and the values of the VMA_DEBUG_*
// compile-time macros — everything a replayer needs to reproduce the run.
14080 void VmaRecorder::WriteConfiguration(
14081 const VkPhysicalDeviceProperties& devProps,
14082 const VkPhysicalDeviceMemoryProperties& memProps,
14083 bool dedicatedAllocationExtensionEnabled)
14085 fprintf(m_File,
"Config,Begin\n");
14087 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14088 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14089 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14090 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14091 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14092 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits relevant to VMA's allocation/suballocation decisions.
14094 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14095 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14096 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps: count, then size and flags per heap.
14098 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14099 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14101 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14102 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory types: count, then heap index and property flags per type.
14104 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14105 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14107 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14108 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
14111 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration macros that alter recorded behavior.
14113 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14114 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14115 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14116 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14117 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14118 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14119 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14120 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14121 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14123 fprintf(m_File,
"Config,End\n");
// Fills the per-call parameters: current Win32 thread id and elapsed time in
// seconds since Init, derived from QueryPerformanceCounter relative to
// m_StartCounter with frequency m_Freq.
14126 void VmaRecorder::GetBasicParams(CallParams& outParams)
14128 outParams.threadId = GetCurrentThreadId();
14130 LARGE_INTEGER counter;
14131 QueryPerformanceCounter(&counter);
14132 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Writes `count` allocation handles as "%p %p ..." (space-separated, first one
// without a leading space).
// NOTE(review): as shown, pItems[0] is printed unconditionally — a guard like
// `if(count)` was presumably present before extraction; verify, since count==0
// would read past the array otherwise.
14135 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
14139 fprintf(m_File,
"%p", pItems[0]);
14140 for(uint64_t i = 1; i < count; ++i)
14142 fprintf(m_File,
" %p", pItems[i]);
// Flushes the recording file to disk (body — presumably an fflush(m_File),
// possibly gated on a flush-after-call flag — lost in extraction; TODO confirm).
14147 void VmaRecorder::Flush()
// VmaAllocationObjectAllocator: mutex-guarded pool allocator for
// VmaAllocation_T objects, backed by an internal pool with 1024-item capacity
// granularity. Allocate() and Free() simply forward under the lock.
14155 #endif // #if VMA_RECORDING_ENABLED 14160 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14161 m_Allocator(pAllocationCallbacks, 1024)
14167 VmaMutexLock mutexLock(m_Mutex);
14168 return m_Allocator.Alloc();
14171 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14173 VmaMutexLock mutexLock(m_Mutex);
14174 m_Allocator.Free(hAlloc);
// VmaAllocator_T constructor (signature line lost in extraction): copies
// device/physical-device handles and allocation callbacks from the create
// info, zero-initializes bookkeeping, queries device properties, applies
// optional per-heap size limits, creates one default VmaBlockVector and one
// dedicated-allocation vector per memory type, and optionally starts the
// call recorder.
14183 m_hDevice(pCreateInfo->device),
14184 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14185 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14186 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14187 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14188 m_PreferredLargeHeapBlockSize(0),
14189 m_PhysicalDevice(pCreateInfo->physicalDevice),
14190 m_CurrentFrameIndex(0),
14191 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
14192 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14195 ,m_pRecorder(VMA_NULL)
// Corruption detection requires a margin that holds whole uint32_t words.
14198 if(VMA_DEBUG_DETECT_CORRUPTION)
14201 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Dedicated-allocation flag requested but extension support compiled out.
14206 #if !(VMA_DEDICATED_ALLOCATION) 14209 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14213 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14214 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14215 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14217 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14218 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no per-heap size limit.
14220 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14222 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
14233 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14234 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14236 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14237 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14238 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14239 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply caller-supplied heap size limits, also clamping the reported heap size.
14246 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14248 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14249 if(limit != VK_WHOLE_SIZE)
14251 m_HeapSizeLimit[heapIndex] = limit;
14252 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14254 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + one dedicated-allocation list per memory type.
14260 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14262 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14264 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14268 preferredBlockSize,
14271 GetBufferImageGranularity(),
14278 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14285 VkResult res = VK_SUCCESS;
// Optional call recording: only available when compiled with recording support.
14290 #if VMA_RECORDING_ENABLED 14291 m_pRecorder = vma_new(
this, VmaRecorder)();
14293 if(res != VK_SUCCESS)
14297 m_pRecorder->WriteConfiguration(
14298 m_PhysicalDeviceProperties,
14300 m_UseKhrDedicatedAllocation);
14301 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14303 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14304 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records allocator destruction (if recording), verifies all
// custom pools were destroyed, asserts no dedicated allocations leaked, and
// deletes per-memory-type dedicated-allocation lists and block vectors.
14311 VmaAllocator_T::~VmaAllocator_T()
14313 #if VMA_RECORDING_ENABLED 14314 if(m_pRecorder != VMA_NULL)
14316 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14317 vma_delete(
this, m_pRecorder);
14321 VMA_ASSERT(m_Pools.empty());
// Iterate memory types in reverse while freeing their bookkeeping.
14323 for(
size_t i = GetMemoryTypeCount(); i--; )
14325 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14327 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14330 vma_delete(
this, m_pDedicatedAllocations[i]);
14331 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions: first from statically linked Vulkan prototypes
// (when VMA_STATIC_VULKAN_FUNCTIONS == 1), fetching the KHR dedicated-
// allocation entry points via vkGetDeviceProcAddr; then overrides any pointer
// the caller supplied in pVulkanFunctions; finally asserts every required
// pointer is set.
14335 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14337 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14338 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14339 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14340 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14341 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14342 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14343 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14344 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14345 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14346 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14347 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14348 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14349 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14350 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14351 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14352 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14353 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14354 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// Extension entry points are not exported statically; query them per device.
14355 #if VMA_DEDICATED_ALLOCATION 14356 if(m_UseKhrDedicatedAllocation)
14358 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14359 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14360 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14361 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Caller-supplied pointers take precedence over statically resolved ones.
14363 #endif // #if VMA_DEDICATED_ALLOCATION 14364 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14366 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14367 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14369 if(pVulkanFunctions != VMA_NULL)
14371 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14372 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14373 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14374 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14375 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14376 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14377 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14378 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14379 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14380 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14381 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14382 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14383 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14384 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14385 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14386 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14387 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14388 #if VMA_DEDICATED_ALLOCATION 14389 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14390 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every required entry point must be non-null by now.
14394 #undef VMA_COPY_IF_NOT_NULL 14398 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14399 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14400 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14401 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14402 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14403 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14404 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14405 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14406 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14407 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14408 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14409 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14410 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14411 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14412 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14413 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14414 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14415 #if VMA_DEDICATED_ALLOCATION 14416 if(m_UseKhrDedicatedAllocation)
14418 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14419 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14424 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14426 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14427 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14428 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14429 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates one or more allocations from a specific memory type. Strategy:
// prefer dedicated memory when forced by debug macro, requested by the caller,
// or when the size exceeds half the preferred block size; otherwise try the
// type's default block vector first and fall back to dedicated memory.
// Several argument/brace lines were lost in extraction.
14432 VkResult VmaAllocator_T::AllocateMemoryOfType(
14434 VkDeviceSize alignment,
14435 bool dedicatedAllocation,
14436 VkBuffer dedicatedBuffer,
14437 VkImage dedicatedImage,
14439 uint32_t memTypeIndex,
14440 VmaSuballocationType suballocType,
14441 size_t allocationCount,
14444 VMA_ASSERT(pAllocations != VMA_NULL);
14445 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Non-host-visible memory cannot honor mapping-related flags (context lost in
// extraction — presumably clears MAPPED/user-data-string flags; TODO confirm).
14451 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14456 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14457 VMA_ASSERT(blockVector);
14459 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: very large requests go straight to a dedicated VkDeviceMemory.
14460 bool preferDedicatedMemory =
14461 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14462 dedicatedAllocation ||
14464 size > preferredBlockSize / 2;
14466 if(preferDedicatedMemory &&
14468 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new device memory: fail here.
14477 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14481 return AllocateDedicatedMemory(
// Primary path: suballocate from the default block vector.
14496 VkResult res = blockVector->Allocate(
14497 m_CurrentFrameIndex.load(),
14504 if(res == VK_SUCCESS)
// Block allocation failed and new blocks are forbidden: give up.
14512 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: try a dedicated allocation.
14516 res = AllocateDedicatedMemory(
14522 finalCreateInfo.pUserData,
14527 if(res == VK_SUCCESS)
14530 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14536 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates `allocationCount` dedicated VkDeviceMemory blocks of the given
// size/type, chaining VkMemoryDedicatedAllocateInfoKHR when the KHR extension
// is in use and a dedicated buffer or image was supplied. On success the new
// allocations are registered (sorted) in the per-type dedicated list; on
// failure all pages allocated so far are rolled back.
14543 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14545 VmaSuballocationType suballocType,
14546 uint32_t memTypeIndex,
14548 bool isUserDataString,
14550 VkBuffer dedicatedBuffer,
14551 VkImage dedicatedImage,
14552 size_t allocationCount,
14555 VMA_ASSERT(allocationCount > 0 && pAllocations);
14557 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14558 allocInfo.memoryTypeIndex = memTypeIndex;
14559 allocInfo.allocationSize = size;
// Chain dedicated-allocation info for exactly one of buffer or image.
14561 #if VMA_DEDICATED_ALLOCATION 14562 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14563 if(m_UseKhrDedicatedAllocation)
14565 if(dedicatedBuffer != VK_NULL_HANDLE)
14567 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14568 dedicatedAllocInfo.buffer = dedicatedBuffer;
14569 allocInfo.pNext = &dedicatedAllocInfo;
14571 else if(dedicatedImage != VK_NULL_HANDLE)
14573 dedicatedAllocInfo.image = dedicatedImage;
14574 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate page by page, stopping at the first failure.
14577 #endif // #if VMA_DEDICATED_ALLOCATION 14580 VkResult res = VK_SUCCESS;
14581 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14583 res = AllocateDedicatedMemoryPage(
14591 pAllocations + allocIndex);
14592 if(res != VK_SUCCESS)
14598 if(res == VK_SUCCESS)
// Register all new allocations under the per-type write lock.
14602 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14603 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14604 VMA_ASSERT(pDedicatedAllocations);
14605 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14607 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14611 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: free every page allocated before the failing one, in reverse order.
14616 while(allocIndex--)
14619 VkDeviceMemory hMemory = currAlloc->GetMemory();
14631 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14633 currAlloc->SetUserData(
this, VMA_NULL);
14635 m_AllocationObjectAllocator.Free(currAlloc);
// Leave the caller's output array in a defined (all-null) state.
14638 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory, optionally maps it (persistent
// mapping; the gating condition was lost in extraction — presumably the
// MAPPED flag), then constructs and initializes the VmaAllocation object.
// vkMapMemory failure rolls back the device memory.
14644 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14646 VmaSuballocationType suballocType,
14647 uint32_t memTypeIndex,
14648 const VkMemoryAllocateInfo& allocInfo,
14650 bool isUserDataString,
14654 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14655 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14658 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14662 void* pMappedData = VMA_NULL;
14665 res = (*m_VulkanFunctions.vkMapMemory)(
// Mapping failed: release the just-allocated memory before returning.
14674 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
14675 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Construct the allocation object and attach user data.
14680 *pAllocation = m_AllocationObjectAllocator.Allocate();
14681 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14682 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14683 (*pAllocation)->SetUserData(
this, pUserData);
// Optionally fill new memory with a debug pattern.
14684 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14686 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR and also reports the
// dedicated-allocation hints; otherwise falls back to the core entry point
// and reports both hints as false.
14692 void VmaAllocator_T::GetBufferMemoryRequirements(
14694 VkMemoryRequirements& memReq,
14695 bool& requiresDedicatedAllocation,
14696 bool& prefersDedicatedAllocation)
const 14698 #if VMA_DEDICATED_ALLOCATION 14699 if(m_UseKhrDedicatedAllocation)
14701 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14702 memReqInfo.buffer = hBuffer;
14704 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
// Chain the dedicated-requirements struct so the driver fills in the hints.
14706 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14707 memReq2.pNext = &memDedicatedReq;
14709 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14711 memReq = memReq2.memoryRequirements;
14712 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14713 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core-only fallback path.
14716 #endif // #if VMA_DEDICATED_ALLOCATION 14718 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14719 requiresDedicatedAllocation =
false;
14720 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with chained dedicated-requirements when
// the KHR extension is enabled, else the core query with hints set to false.
14724 void VmaAllocator_T::GetImageMemoryRequirements(
14726 VkMemoryRequirements& memReq,
14727 bool& requiresDedicatedAllocation,
14728 bool& prefersDedicatedAllocation)
const 14730 #if VMA_DEDICATED_ALLOCATION 14731 if(m_UseKhrDedicatedAllocation)
14733 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14734 memReqInfo.image = hImage;
14736 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14738 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14739 memReq2.pNext = &memDedicatedReq;
14741 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14743 memReq = memReq2.memoryRequirements;
14744 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14745 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core-only fallback path.
14748 #endif // #if VMA_DEDICATED_ALLOCATION 14750 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14751 requiresDedicatedAllocation =
false;
14752 prefersDedicatedAllocation =
false;
// Central allocation entry point: validates create flags, then routes the
// request to a custom pool, or to the default per-memory-type block vectors,
// retrying with progressively worse memory types on failure.
// NOTE(review): this excerpt is missing several original lines (parameter
// declarations, flag conditions, trailing call arguments) — comments hedge
// where behavior cannot be confirmed from the visible code.
14756 VkResult VmaAllocator_T::AllocateMemory(
14757 const VkMemoryRequirements& vkMemReq,
14758 bool requiresDedicatedAllocation,
14759 bool prefersDedicatedAllocation,
14760 VkBuffer dedicatedBuffer,
14761 VkImage dedicatedImage,
14763 VmaSuballocationType suballocType,
14764 size_t allocationCount,
// Zero the output array so callers observe VK_NULL_HANDLE entries on failure.
14767 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14769 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// A zero-size request is a usage error.
14771 if(vkMemReq.size == 0)
14773 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject mutually exclusive create flags (the if-conditions are elided in
// this excerpt; the assert messages state the rejected combinations).
14778 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14779 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14784 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14785 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// When the driver mandates a dedicated allocation, NEVER_ALLOCATE and custom
// pools are both incompatible with that requirement.
14787 if(requiresDedicatedAllocation)
14791 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14792 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14794 if(createInfo.
pool != VK_NULL_HANDLE)
14796 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14797 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14800 if((createInfo.
pool != VK_NULL_HANDLE) &&
14803 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14804 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: honor the pool memory type's minimum alignment and
// delegate directly to the pool's block vector.
14807 if(createInfo.
pool != VK_NULL_HANDLE)
14809 const VkDeviceSize alignmentForPool = VMA_MAX(
14810 vkMemReq.alignment,
14811 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
// presumably strips MAPPED_BIT when the pool's memory type is not
// HOST_VISIBLE — the surrounding statement is elided; TODO confirm.
14816 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14821 return createInfo.
pool->m_BlockVector.Allocate(
14822 m_CurrentFrameIndex.load(),
// Default path: iterate candidate memory types from best to worst fit.
// (The FindMemoryTypeIndex calls that set memTypeIndex/res are elided.)
14833 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14834 uint32_t memTypeIndex = UINT32_MAX;
14836 if(res == VK_SUCCESS)
14838 VkDeviceSize alignmentForMemType = VMA_MAX(
14839 vkMemReq.alignment,
14840 GetMemoryTypeMinAlignment(memTypeIndex));
14842 res = AllocateMemoryOfType(
14844 alignmentForMemType,
14845 requiresDedicatedAllocation || prefersDedicatedAllocation,
14854 if(res == VK_SUCCESS)
// Exclude the failed memory type and look for the next-best candidate.
14864 memoryTypeBits &= ~(1u << memTypeIndex);
14867 if(res == VK_SUCCESS)
14869 alignmentForMemType = VMA_MAX(
14870 vkMemReq.alignment,
14871 GetMemoryTypeMinAlignment(memTypeIndex));
14873 res = AllocateMemoryOfType(
14875 alignmentForMemType,
14876 requiresDedicatedAllocation || prefersDedicatedAllocation,
14885 if(res == VK_SUCCESS)
// All candidate memory types exhausted.
14895 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a batch of allocations in reverse order. Each live allocation is
// optionally overwritten with a debug "destroyed" pattern, returned to its
// owning block vector (custom pool or default) or freed as dedicated memory,
// then its metadata object is destroyed and recycled.
// NOTE(review): some original lines (parameter list, the statement fetching
// `allocation` from pAllocations, break statements) are elided in this excerpt.
14906 void VmaAllocator_T::FreeMemory(
14907 size_t allocationCount,
14910 VMA_ASSERT(pAllocations);
// Reverse iteration: frees in the opposite order of allocation.
14912 for(
size_t allocIndex = allocationCount; allocIndex--; )
14916 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returns false for lost allocations — only fill live ones.
14918 if(TouchAllocation(allocation))
14920 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14922 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14925 switch(allocation->GetType())
14927 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14929 VmaBlockVector* pBlockVector = VMA_NULL;
14930 VmaPool hPool = allocation->GetBlock()->GetParentPool();
14931 if(hPool != VK_NULL_HANDLE)
// Block belongs to a custom pool.
14933 pBlockVector = &hPool->m_BlockVector;
// Otherwise: block belongs to the default vector for its memory type.
14937 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14938 pBlockVector = m_pBlockVectors[memTypeIndex];
14940 pBlockVector->Free(allocation);
14943 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14944 FreeDedicatedMemory(allocation);
// Release user data, destroy the allocation object, return it to the pool.
14951 allocation->SetUserData(
this, VMA_NULL);
14952 allocation->Dtor();
14953 m_AllocationObjectAllocator.Free(allocation);
14958 VkResult VmaAllocator_T::ResizeAllocation(
14960 VkDeviceSize newSize)
14962 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14964 return VK_ERROR_VALIDATION_FAILED_EXT;
14966 if(newSize == alloc->GetSize())
14971 switch(alloc->GetType())
14973 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14974 return VK_ERROR_FEATURE_NOT_PRESENT;
14975 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14976 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
14978 alloc->ChangeSize(newSize);
14979 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
14984 return VK_ERROR_OUT_OF_POOL_MEMORY;
14988 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates statistics across default block vectors, custom pools, and
// dedicated allocations, then post-processes per-type/per-heap/total infos.
// NOTE(review): braces and a few lines (e.g. the allocationStatInfo
// declaration) are elided in this excerpt.
14992 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Initialize all output stat infos before accumulation.
14995 InitStatInfo(pStats->
total);
14996 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14998 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Pass 1: default block vectors, one per memory type.
15002 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15004 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15005 VMA_ASSERT(pBlockVector);
15006 pBlockVector->AddStats(pStats);
// Pass 2: custom pools, under the pools read lock.
15011 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15012 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15014 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Pass 3: dedicated allocations, per memory type, under their read lock.
15019 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15021 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
15022 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15023 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15024 VMA_ASSERT(pDedicatedAllocVector);
15025 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
15028 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
15029 VmaAddStatInfo(pStats->
total, allocationStatInfo);
15030 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
15031 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Finalize derived values (averages, min/max) for every stat info.
15036 VmaPostprocessCalcStatInfo(pStats->
total);
15037 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
15038 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
15039 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
15040 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, AMD's PCI vendor ID (compared against
// VkPhysicalDeviceProperties::vendorID to detect AMD GPUs).
15043 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Creates a defragmentation context, registers the allocations to move, and
// starts the defragmentation pass. The context is destroyed immediately
// unless Defragment returned VK_NOT_READY (work still pending).
// NOTE(review): the parameter list and several call arguments are elided in
// this excerpt.
15045 VkResult VmaAllocator_T::DefragmentationBegin(
15055 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15056 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15059 (*pContext)->AddAllocations(
15062 VkResult res = (*pContext)->Defragment(
// VK_NOT_READY means the caller must later call DefragmentationEnd; any
// other result completes now, so free the context here.
15067 if(res != VK_NOT_READY)
15069 vma_delete(
this, *pContext);
15070 *pContext = VMA_NULL;
15076 VkResult VmaAllocator_T::DefragmentationEnd(
15079 vma_delete(
this, context);
// Body of VmaAllocator_T::GetAllocationInfo (its signature line is absent
// from this excerpt). Fills *pAllocationInfo; for allocations that can
// become lost it also "touches" them via a compare-exchange retry loop on
// the last-use frame index.
15085 if(hAllocation->CanBecomeLost())
15091 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15092 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Allocation already lost: report memoryType/deviceMemory-less info
// (fields other than those below are set in elided lines).
15095 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15099 pAllocationInfo->
offset = 0;
15100 pAllocationInfo->
size = hAllocation->GetSize();
15102 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report real parameters without retrying.
15105 else if(localLastUseFrameIndex == localCurrFrameIndex)
15107 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15108 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15109 pAllocationInfo->
offset = hAllocation->GetOffset();
15110 pAllocationInfo->
size = hAllocation->GetSize();
15112 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise: CAS the frame index forward and loop until it sticks.
15117 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15119 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: when stats are enabled, still advance the last-use
// frame index so usage shows up in statistics.
15126 #if VMA_STATS_STRING_ENABLED 15127 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15128 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15131 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15132 if(localLastUseFrameIndex == localCurrFrameIndex)
15138 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15140 localLastUseFrameIndex = localCurrFrameIndex;
// Report the allocation's current parameters.
15146 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15147 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15148 pAllocationInfo->
offset = hAllocation->GetOffset();
15149 pAllocationInfo->
size = hAllocation->GetSize();
15150 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15151 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. Returns false when the
// allocation has become lost (return statements are elided in this excerpt).
// Uses a compare-exchange retry loop to advance the last-use frame index.
15155 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15158 if(hAllocation->CanBecomeLost())
15160 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15161 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost → presumably returns false; already current → presumably returns
// true (the return lines are elided) — TODO confirm against the full source.
15164 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15168 else if(localLastUseFrameIndex == localCurrFrameIndex)
15174 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15176 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: only advance the frame index when stats are enabled.
15183 #if VMA_STATS_STRING_ENABLED 15184 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15185 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15188 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15189 if(localLastUseFrameIndex == localCurrFrameIndex)
15195 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15197 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature and the newCreateInfo
// validation lines are absent from this excerpt). Creates a VmaPool_T,
// pre-allocates its minimum blocks, and registers it in m_Pools.
15209 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
// Elided validation above presumably rejects bad create infos here.
15219 return VK_ERROR_INITIALIZATION_FAILED;
15222 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15224 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Pre-create minBlockCount blocks; destroy the pool on failure.
15226 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15227 if(res != VK_SUCCESS)
15229 vma_delete(
this, *pPool);
// Register the pool: assign a unique id and keep m_Pools sorted.
15236 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15237 (*pPool)->SetId(m_NextPoolId++);
15238 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
15244 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15248 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15249 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15250 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15253 vma_delete(
this, pool);
// Body of VmaAllocator_T::GetPoolStats (signature absent from this excerpt):
// delegates directly to the pool's block vector.
15258 pool->m_BlockVector.GetPoolStats(pPoolStats);
15261 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15263 m_CurrentFrameIndex.store(frameIndex);
15266 void VmaAllocator_T::MakePoolAllocationsLost(
15268 size_t* pLostAllocationCount)
15270 hPool->m_BlockVector.MakePoolAllocationsLost(
15271 m_CurrentFrameIndex.load(),
15272 pLostAllocationCount);
15275 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15277 return hPool->m_BlockVector.CheckCorruption();
// Checks corruption in all block vectors (default and custom pools) whose
// memory type is included in memoryTypeBits. Returns VK_SUCCESS if at least
// one vector was checked successfully, VK_ERROR_FEATURE_NOT_PRESENT when no
// vector supports the check. NOTE(review): several switch arms and the final
// return are elided in this excerpt.
15280 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15282 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Pass 1: default block vectors, filtered by the requested type bits.
15285 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15287 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15289 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15290 VMA_ASSERT(pBlockVector);
15291 VkResult localRes = pBlockVector->CheckCorruption();
// FEATURE_NOT_PRESENT is ignored; a successful check upgrades finalRes.
15294 case VK_ERROR_FEATURE_NOT_PRESENT:
15297 finalRes = VK_SUCCESS;
// Pass 2: custom pools, under the pools read lock.
15307 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15308 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15310 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15312 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15315 case VK_ERROR_FEATURE_NOT_PRESENT:
15318 finalRes = VK_SUCCESS;
15330 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15332 *pAllocation = m_AllocationObjectAllocator.Allocate();
15333 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15334 (*pAllocation)->InitLost();
15337 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15339 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
15342 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15344 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15345 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15347 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15348 if(res == VK_SUCCESS)
15350 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
15355 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
15360 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15363 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15365 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
15371 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15373 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15375 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15378 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15380 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15381 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15383 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15384 m_HeapSizeLimit[heapIndex] += size;
15388 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15390 if(hAllocation->CanBecomeLost())
15392 return VK_ERROR_MEMORY_MAP_FAILED;
15395 switch(hAllocation->GetType())
15397 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15399 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15400 char *pBytes = VMA_NULL;
15401 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15402 if(res == VK_SUCCESS)
15404 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15405 hAllocation->BlockAllocMap();
15409 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15410 return hAllocation->DedicatedAllocMap(
this, ppData);
15413 return VK_ERROR_MEMORY_MAP_FAILED;
15419 switch(hAllocation->GetType())
15421 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15423 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15424 hAllocation->BlockAllocUnmap();
15425 pBlock->Unmap(
this, 1);
15428 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15429 hAllocation->DedicatedAllocUnmap(
this);
15436 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
15438 VkResult res = VK_SUCCESS;
15439 switch(hAllocation->GetType())
15441 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15442 res = GetVulkanFunctions().vkBindBufferMemory(
15445 hAllocation->GetMemory(),
15448 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15450 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15451 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15452 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
15461 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
15463 VkResult res = VK_SUCCESS;
15464 switch(hAllocation->GetType())
15466 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15467 res = GetVulkanFunctions().vkBindImageMemory(
15470 hAllocation->GetMemory(),
15473 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15475 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15476 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15477 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a sub-range of a host-visible, non-coherent
// allocation. The range is expanded to nonCoherentAtomSize boundaries as the
// Vulkan spec requires, and clamped so it never exceeds the allocation (and,
// for block suballocations, the owning block).
// NOTE(review): the `op` switch statement and some asserts are elided in
// this excerpt.
15486 void VmaAllocator_T::FlushOrInvalidateAllocation(
15488 VkDeviceSize offset, VkDeviceSize size,
15489 VMA_CACHE_OPERATION op)
// No-op for empty ranges and for coherent memory types.
15491 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
15492 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15494 const VkDeviceSize allocationSize = hAllocation->GetSize();
15495 VMA_ASSERT(offset <= allocationSize);
15497 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15499 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15500 memRange.memory = hAllocation->GetMemory();
15502 switch(hAllocation->GetType())
// Dedicated: align the range down/up within the allocation itself.
15504 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15505 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15506 if(size == VK_WHOLE_SIZE)
15508 memRange.size = allocationSize - memRange.offset;
15512 VMA_ASSERT(offset + size <= allocationSize);
15513 memRange.size = VMA_MIN(
15514 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15515 allocationSize - memRange.offset);
// Block suballocation: compute the aligned range relative to the
// allocation, then translate by the allocation's offset inside the block
// and clamp to the block's size.
15519 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15522 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15523 if(size == VK_WHOLE_SIZE)
15525 size = allocationSize - offset;
15529 VMA_ASSERT(offset + size <= allocationSize);
15531 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
15534 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15535 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15536 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15537 memRange.offset += allocationOffset;
15538 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch on the requested cache operation.
15549 case VMA_CACHE_FLUSH:
15550 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15552 case VMA_CACHE_INVALIDATE:
15553 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
15562 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15564 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15566 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15568 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15569 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15570 VMA_ASSERT(pDedicatedAllocations);
15571 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15572 VMA_ASSERT(success);
15575 VkDeviceMemory hMemory = allocation->GetMemory();
15587 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15589 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
15592 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const 15594 VkBufferCreateInfo dummyBufCreateInfo;
15595 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
15597 uint32_t memoryTypeBits = 0;
15600 VkBuffer buf = VK_NULL_HANDLE;
15601 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
15602 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
15603 if(res == VK_SUCCESS)
15606 VkMemoryRequirements memReq;
15607 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
15608 memoryTypeBits = memReq.memoryTypeBits;
15611 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
15614 return memoryTypeBits;
15617 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15619 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15620 !hAllocation->CanBecomeLost() &&
15621 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15623 void* pData = VMA_NULL;
15624 VkResult res = Map(hAllocation, &pData);
15625 if(res == VK_SUCCESS)
15627 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
15628 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15629 Unmap(hAllocation);
15633 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
15638 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
15640 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
15641 if(memoryTypeBits == UINT32_MAX)
15643 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
15644 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
15646 return memoryTypeBits;
// Writes a detailed JSON map of the allocator's state: per-memory-type
// dedicated allocations, default block vectors ("DefaultPools"), and custom
// pools ("Pools"). NOTE(review): many json End*/EndString calls and the
// hAlloc declaration are elided in this excerpt.
15649 #if VMA_STATS_STRING_ENABLED 15651 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// Section 1: dedicated allocations, grouped by memory type. The
// "DedicatedAllocations" object is opened lazily on the first non-empty type.
15653 bool dedicatedAllocationsStarted =
false;
15654 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15656 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15657 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15658 VMA_ASSERT(pDedicatedAllocVector);
15659 if(pDedicatedAllocVector->empty() ==
false)
15661 if(dedicatedAllocationsStarted ==
false)
15663 dedicatedAllocationsStarted =
true;
15664 json.WriteString(
"DedicatedAllocations");
15665 json.BeginObject();
// Key is "Type <memTypeIndex>".
15668 json.BeginString(
"Type ");
15669 json.ContinueString(memTypeIndex);
15674 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15676 json.BeginObject(
true);
15678 hAlloc->PrintParameters(json);
15685 if(dedicatedAllocationsStarted)
// Section 2: default block vectors, opened lazily as "DefaultPools".
15691 bool allocationsStarted =
false;
15692 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15694 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15696 if(allocationsStarted ==
false)
15698 allocationsStarted =
true;
15699 json.WriteString(
"DefaultPools");
15700 json.BeginObject();
15703 json.BeginString(
"Type ");
15704 json.ContinueString(memTypeIndex);
15707 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15710 if(allocationsStarted)
// Section 3: custom pools, keyed by pool id, under the pools read lock.
15718 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15719 const size_t poolCount = m_Pools.size();
15722 json.WriteString(
"Pools");
15723 json.BeginObject();
15724 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15726 json.BeginString();
15727 json.ContinueString(m_Pools[poolIndex]->GetId());
15730 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Public C API wrappers. NOTE(review): several function signatures are
// partially elided in this excerpt; the bodies below belong, in order, to
// vmaCreateAllocator, vmaDestroyAllocator, vmaGetPhysicalDeviceProperties,
// vmaGetMemoryProperties, vmaGetMemoryTypeProperties,
// vmaSetCurrentFrameIndex and vmaCalculateStats.
15737 #endif // #if VMA_STATS_STRING_ENABLED 15746 VMA_ASSERT(pCreateInfo && pAllocator);
15747 VMA_DEBUG_LOG(
"vmaCreateAllocator");
// The allocator object itself is constructed in an elided line; Init
// performs the fallible part of construction.
15749 return (*pAllocator)->Init(pCreateInfo);
// vmaDestroyAllocator: copy the callbacks out first, because they are owned
// by the allocator being deleted.
15755 if(allocator != VK_NULL_HANDLE)
15757 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15758 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15759 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties: expose the cached device properties.
15765 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15767 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15768 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: expose the cached memory properties.
15773 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15775 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15776 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: bounds-checked lookup of one type's flags.
15781 uint32_t memoryTypeIndex,
15782 VkMemoryPropertyFlags* pFlags)
15784 VMA_ASSERT(allocator && pFlags);
15785 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15786 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: VMA_FRAME_INDEX_LOST is reserved as a sentinel.
15791 uint32_t frameIndex)
15793 VMA_ASSERT(allocator);
15794 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15796 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15798 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats: forward to the allocator implementation.
15805 VMA_ASSERT(allocator && pStats);
15806 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15807 allocator->CalculateStats(pStats);
// vmaBuildStatsString: serializes allocator statistics as a JSON document
// into a newly allocated, NUL-terminated string the caller must release via
// vmaFreeStatsString. NOTE(review): various json End*/EndString calls and
// the stats declaration are elided in this excerpt.
15810 #if VMA_STATS_STRING_ENABLED 15814 char** ppStatsString,
15815 VkBool32 detailedMap)
15817 VMA_ASSERT(allocator && ppStatsString);
15818 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15820 VmaStringBuilder sb(allocator);
15822 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15823 json.BeginObject();
// Global totals first.
15826 allocator->CalculateStats(&stats);
15828 json.WriteString(
"Total");
15829 VmaPrintStatInfo(json, stats.
total);
// One object per memory heap: size, flags, stats, then its memory types.
15831 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15833 json.BeginString(
"Heap ");
15834 json.ContinueString(heapIndex);
15836 json.BeginObject();
15838 json.WriteString(
"Size");
15839 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15841 json.WriteString(
"Flags");
15842 json.BeginArray(
true);
15843 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15845 json.WriteString(
"DEVICE_LOCAL");
15851 json.WriteString(
"Stats");
15852 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nested: every memory type belonging to this heap.
15855 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15857 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15859 json.BeginString(
"Type ");
15860 json.ContinueString(typeIndex);
15863 json.BeginObject();
// Spell out each property flag by name.
15865 json.WriteString(
"Flags");
15866 json.BeginArray(
true);
15867 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15868 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15870 json.WriteString(
"DEVICE_LOCAL");
15872 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15874 json.WriteString(
"HOST_VISIBLE");
15876 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15878 json.WriteString(
"HOST_COHERENT");
15880 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15882 json.WriteString(
"HOST_CACHED");
15884 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15886 json.WriteString(
"LAZILY_ALLOCATED");
15892 json.WriteString(
"Stats");
15893 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional deep dump of every block and allocation.
15902 if(detailedMap == VK_TRUE)
15904 allocator->PrintDetailedMap(json);
// Copy the builder's buffer into a caller-owned NUL-terminated string.
15910 const size_t len = sb.GetLength();
15911 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15914 memcpy(pChars, sb.GetData(), len);
15916 pChars[len] =
'\0';
15917 *ppStatsString = pChars;
// vmaFreeStatsString: releases a string produced by vmaBuildStatsString.
// The +1 accounts for the NUL terminator included in the original array.
15922 char* pStatsString)
15924 if(pStatsString != VMA_NULL)
15926 VMA_ASSERT(allocator);
15927 size_t len = strlen(pStatsString);
15928 vma_delete_array(allocator, pStatsString, len + 1);
15932 #endif // #if VMA_STATS_STRING_ENABLED 15939 uint32_t memoryTypeBits,
15941 uint32_t* pMemoryTypeIndex)
15943 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15944 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15945 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15952 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15953 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15956 switch(pAllocationCreateInfo->
usage)
15961 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15963 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15967 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15970 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15971 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15973 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15977 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15978 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15984 *pMemoryTypeIndex = UINT32_MAX;
15985 uint32_t minCost = UINT32_MAX;
15986 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15987 memTypeIndex < allocator->GetMemoryTypeCount();
15988 ++memTypeIndex, memTypeBit <<= 1)
15991 if((memTypeBit & memoryTypeBits) != 0)
15993 const VkMemoryPropertyFlags currFlags =
15994 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
15996 if((requiredFlags & ~currFlags) == 0)
15999 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
16001 if(currCost < minCost)
16003 *pMemoryTypeIndex = memTypeIndex;
16008 minCost = currCost;
16013 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
16018 const VkBufferCreateInfo* pBufferCreateInfo,
16020 uint32_t* pMemoryTypeIndex)
16022 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16023 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
16024 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16025 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16027 const VkDevice hDev = allocator->m_hDevice;
16028 VkBuffer hBuffer = VK_NULL_HANDLE;
16029 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
16030 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
16031 if(res == VK_SUCCESS)
16033 VkMemoryRequirements memReq = {};
16034 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
16035 hDev, hBuffer, &memReq);
16039 memReq.memoryTypeBits,
16040 pAllocationCreateInfo,
16043 allocator->GetVulkanFunctions().vkDestroyBuffer(
16044 hDev, hBuffer, allocator->GetAllocationCallbacks());
16051 const VkImageCreateInfo* pImageCreateInfo,
16053 uint32_t* pMemoryTypeIndex)
16055 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16056 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
16057 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16058 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16060 const VkDevice hDev = allocator->m_hDevice;
16061 VkImage hImage = VK_NULL_HANDLE;
16062 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
16063 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
16064 if(res == VK_SUCCESS)
16066 VkMemoryRequirements memReq = {};
16067 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
16068 hDev, hImage, &memReq);
16072 memReq.memoryTypeBits,
16073 pAllocationCreateInfo,
16076 allocator->GetVulkanFunctions().vkDestroyImage(
16077 hDev, hImage, allocator->GetAllocationCallbacks());
16087 VMA_ASSERT(allocator && pCreateInfo && pPool);
16089 VMA_DEBUG_LOG(
"vmaCreatePool");
16091 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16093 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
16095 #if VMA_RECORDING_ENABLED 16096 if(allocator->GetRecorder() != VMA_NULL)
16098 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
16109 VMA_ASSERT(allocator);
16111 if(pool == VK_NULL_HANDLE)
16116 VMA_DEBUG_LOG(
"vmaDestroyPool");
16118 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16120 #if VMA_RECORDING_ENABLED 16121 if(allocator->GetRecorder() != VMA_NULL)
16123 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
16127 allocator->DestroyPool(pool);
16135 VMA_ASSERT(allocator && pool && pPoolStats);
16137 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16139 allocator->GetPoolStats(pool, pPoolStats);
16145 size_t* pLostAllocationCount)
16147 VMA_ASSERT(allocator && pool);
16149 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16151 #if VMA_RECORDING_ENABLED 16152 if(allocator->GetRecorder() != VMA_NULL)
16154 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
16158 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
16163 VMA_ASSERT(allocator && pool);
16165 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16167 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16169 return allocator->CheckPoolCorruption(pool);
16174 const VkMemoryRequirements* pVkMemoryRequirements,
16179 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16181 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16183 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16185 VkResult result = allocator->AllocateMemory(
16186 *pVkMemoryRequirements,
16192 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16196 #if VMA_RECORDING_ENABLED 16197 if(allocator->GetRecorder() != VMA_NULL)
16199 allocator->GetRecorder()->RecordAllocateMemory(
16200 allocator->GetCurrentFrameIndex(),
16201 *pVkMemoryRequirements,
16207 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16209 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16217 const VkMemoryRequirements* pVkMemoryRequirements,
16219 size_t allocationCount,
16223 if(allocationCount == 0)
16228 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16230 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16232 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16234 VkResult result = allocator->AllocateMemory(
16235 *pVkMemoryRequirements,
16241 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16245 #if VMA_RECORDING_ENABLED 16246 if(allocator->GetRecorder() != VMA_NULL)
16248 allocator->GetRecorder()->RecordAllocateMemoryPages(
16249 allocator->GetCurrentFrameIndex(),
16250 *pVkMemoryRequirements,
16252 (uint64_t)allocationCount,
16257 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16259 for(
size_t i = 0; i < allocationCount; ++i)
16261 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16275 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16277 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16279 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16281 VkMemoryRequirements vkMemReq = {};
16282 bool requiresDedicatedAllocation =
false;
16283 bool prefersDedicatedAllocation =
false;
16284 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16285 requiresDedicatedAllocation,
16286 prefersDedicatedAllocation);
16288 VkResult result = allocator->AllocateMemory(
16290 requiresDedicatedAllocation,
16291 prefersDedicatedAllocation,
16295 VMA_SUBALLOCATION_TYPE_BUFFER,
16299 #if VMA_RECORDING_ENABLED 16300 if(allocator->GetRecorder() != VMA_NULL)
16302 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16303 allocator->GetCurrentFrameIndex(),
16305 requiresDedicatedAllocation,
16306 prefersDedicatedAllocation,
16312 if(pAllocationInfo && result == VK_SUCCESS)
16314 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16327 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16329 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16331 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16333 VkMemoryRequirements vkMemReq = {};
16334 bool requiresDedicatedAllocation =
false;
16335 bool prefersDedicatedAllocation =
false;
16336 allocator->GetImageMemoryRequirements(image, vkMemReq,
16337 requiresDedicatedAllocation, prefersDedicatedAllocation);
16339 VkResult result = allocator->AllocateMemory(
16341 requiresDedicatedAllocation,
16342 prefersDedicatedAllocation,
16346 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16350 #if VMA_RECORDING_ENABLED 16351 if(allocator->GetRecorder() != VMA_NULL)
16353 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16354 allocator->GetCurrentFrameIndex(),
16356 requiresDedicatedAllocation,
16357 prefersDedicatedAllocation,
16363 if(pAllocationInfo && result == VK_SUCCESS)
16365 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16375 VMA_ASSERT(allocator);
16377 if(allocation == VK_NULL_HANDLE)
16382 VMA_DEBUG_LOG(
"vmaFreeMemory");
16384 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16386 #if VMA_RECORDING_ENABLED 16387 if(allocator->GetRecorder() != VMA_NULL)
16389 allocator->GetRecorder()->RecordFreeMemory(
16390 allocator->GetCurrentFrameIndex(),
16395 allocator->FreeMemory(
16402 size_t allocationCount,
16405 if(allocationCount == 0)
16410 VMA_ASSERT(allocator);
16412 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16414 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16416 #if VMA_RECORDING_ENABLED 16417 if(allocator->GetRecorder() != VMA_NULL)
16419 allocator->GetRecorder()->RecordFreeMemoryPages(
16420 allocator->GetCurrentFrameIndex(),
16421 (uint64_t)allocationCount,
16426 allocator->FreeMemory(allocationCount, pAllocations);
16432 VkDeviceSize newSize)
16434 VMA_ASSERT(allocator && allocation);
16436 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16438 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16440 #if VMA_RECORDING_ENABLED 16441 if(allocator->GetRecorder() != VMA_NULL)
16443 allocator->GetRecorder()->RecordResizeAllocation(
16444 allocator->GetCurrentFrameIndex(),
16450 return allocator->ResizeAllocation(allocation, newSize);
16458 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16460 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16462 #if VMA_RECORDING_ENABLED 16463 if(allocator->GetRecorder() != VMA_NULL)
16465 allocator->GetRecorder()->RecordGetAllocationInfo(
16466 allocator->GetCurrentFrameIndex(),
16471 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16478 VMA_ASSERT(allocator && allocation);
16480 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16482 #if VMA_RECORDING_ENABLED 16483 if(allocator->GetRecorder() != VMA_NULL)
16485 allocator->GetRecorder()->RecordTouchAllocation(
16486 allocator->GetCurrentFrameIndex(),
16491 return allocator->TouchAllocation(allocation);
16499 VMA_ASSERT(allocator && allocation);
16501 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16503 allocation->SetUserData(allocator, pUserData);
16505 #if VMA_RECORDING_ENABLED 16506 if(allocator->GetRecorder() != VMA_NULL)
16508 allocator->GetRecorder()->RecordSetAllocationUserData(
16509 allocator->GetCurrentFrameIndex(),
16520 VMA_ASSERT(allocator && pAllocation);
16522 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16524 allocator->CreateLostAllocation(pAllocation);
16526 #if VMA_RECORDING_ENABLED 16527 if(allocator->GetRecorder() != VMA_NULL)
16529 allocator->GetRecorder()->RecordCreateLostAllocation(
16530 allocator->GetCurrentFrameIndex(),
16541 VMA_ASSERT(allocator && allocation && ppData);
16543 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16545 VkResult res = allocator->Map(allocation, ppData);
16547 #if VMA_RECORDING_ENABLED 16548 if(allocator->GetRecorder() != VMA_NULL)
16550 allocator->GetRecorder()->RecordMapMemory(
16551 allocator->GetCurrentFrameIndex(),
16563 VMA_ASSERT(allocator && allocation);
16565 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16567 #if VMA_RECORDING_ENABLED 16568 if(allocator->GetRecorder() != VMA_NULL)
16570 allocator->GetRecorder()->RecordUnmapMemory(
16571 allocator->GetCurrentFrameIndex(),
16576 allocator->Unmap(allocation);
16581 VMA_ASSERT(allocator && allocation);
16583 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16585 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16587 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16589 #if VMA_RECORDING_ENABLED 16590 if(allocator->GetRecorder() != VMA_NULL)
16592 allocator->GetRecorder()->RecordFlushAllocation(
16593 allocator->GetCurrentFrameIndex(),
16594 allocation, offset, size);
16601 VMA_ASSERT(allocator && allocation);
16603 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16605 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16607 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16609 #if VMA_RECORDING_ENABLED 16610 if(allocator->GetRecorder() != VMA_NULL)
16612 allocator->GetRecorder()->RecordInvalidateAllocation(
16613 allocator->GetCurrentFrameIndex(),
16614 allocation, offset, size);
16621 VMA_ASSERT(allocator);
16623 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16625 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16627 return allocator->CheckCorruption(memoryTypeBits);
16633 size_t allocationCount,
16634 VkBool32* pAllocationsChanged,
16644 if(pDefragmentationInfo != VMA_NULL)
16658 if(res == VK_NOT_READY)
16671 VMA_ASSERT(allocator && pInfo && pContext);
16682 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16684 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16686 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16688 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16690 #if VMA_RECORDING_ENABLED 16691 if(allocator->GetRecorder() != VMA_NULL)
16693 allocator->GetRecorder()->RecordDefragmentationBegin(
16694 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16705 VMA_ASSERT(allocator);
16707 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16709 if(context != VK_NULL_HANDLE)
16711 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16713 #if VMA_RECORDING_ENABLED 16714 if(allocator->GetRecorder() != VMA_NULL)
16716 allocator->GetRecorder()->RecordDefragmentationEnd(
16717 allocator->GetCurrentFrameIndex(), context);
16721 return allocator->DefragmentationEnd(context);
16734 VMA_ASSERT(allocator && allocation && buffer);
16736 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16738 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16740 return allocator->BindBufferMemory(allocation, buffer);
16748 VMA_ASSERT(allocator && allocation && image);
16750 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16752 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16754 return allocator->BindImageMemory(allocation, image);
16759 const VkBufferCreateInfo* pBufferCreateInfo,
16765 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16767 if(pBufferCreateInfo->size == 0)
16769 return VK_ERROR_VALIDATION_FAILED_EXT;
16772 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16774 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16776 *pBuffer = VK_NULL_HANDLE;
16777 *pAllocation = VK_NULL_HANDLE;
16780 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16781 allocator->m_hDevice,
16783 allocator->GetAllocationCallbacks(),
16788 VkMemoryRequirements vkMemReq = {};
16789 bool requiresDedicatedAllocation =
false;
16790 bool prefersDedicatedAllocation =
false;
16791 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16792 requiresDedicatedAllocation, prefersDedicatedAllocation);
16796 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16798 VMA_ASSERT(vkMemReq.alignment %
16799 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16801 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16803 VMA_ASSERT(vkMemReq.alignment %
16804 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16806 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16808 VMA_ASSERT(vkMemReq.alignment %
16809 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16813 res = allocator->AllocateMemory(
16815 requiresDedicatedAllocation,
16816 prefersDedicatedAllocation,
16819 *pAllocationCreateInfo,
16820 VMA_SUBALLOCATION_TYPE_BUFFER,
16824 #if VMA_RECORDING_ENABLED 16825 if(allocator->GetRecorder() != VMA_NULL)
16827 allocator->GetRecorder()->RecordCreateBuffer(
16828 allocator->GetCurrentFrameIndex(),
16829 *pBufferCreateInfo,
16830 *pAllocationCreateInfo,
16840 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16845 #if VMA_STATS_STRING_ENABLED 16846 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16848 if(pAllocationInfo != VMA_NULL)
16850 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16855 allocator->FreeMemory(
16858 *pAllocation = VK_NULL_HANDLE;
16859 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16860 *pBuffer = VK_NULL_HANDLE;
16863 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16864 *pBuffer = VK_NULL_HANDLE;
16875 VMA_ASSERT(allocator);
16877 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16882 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16884 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16886 #if VMA_RECORDING_ENABLED 16887 if(allocator->GetRecorder() != VMA_NULL)
16889 allocator->GetRecorder()->RecordDestroyBuffer(
16890 allocator->GetCurrentFrameIndex(),
16895 if(buffer != VK_NULL_HANDLE)
16897 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16900 if(allocation != VK_NULL_HANDLE)
16902 allocator->FreeMemory(
16910 const VkImageCreateInfo* pImageCreateInfo,
16916 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16918 if(pImageCreateInfo->extent.width == 0 ||
16919 pImageCreateInfo->extent.height == 0 ||
16920 pImageCreateInfo->extent.depth == 0 ||
16921 pImageCreateInfo->mipLevels == 0 ||
16922 pImageCreateInfo->arrayLayers == 0)
16924 return VK_ERROR_VALIDATION_FAILED_EXT;
16927 VMA_DEBUG_LOG(
"vmaCreateImage");
16929 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16931 *pImage = VK_NULL_HANDLE;
16932 *pAllocation = VK_NULL_HANDLE;
16935 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16936 allocator->m_hDevice,
16938 allocator->GetAllocationCallbacks(),
16942 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16943 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16944 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16947 VkMemoryRequirements vkMemReq = {};
16948 bool requiresDedicatedAllocation =
false;
16949 bool prefersDedicatedAllocation =
false;
16950 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16951 requiresDedicatedAllocation, prefersDedicatedAllocation);
16953 res = allocator->AllocateMemory(
16955 requiresDedicatedAllocation,
16956 prefersDedicatedAllocation,
16959 *pAllocationCreateInfo,
16964 #if VMA_RECORDING_ENABLED 16965 if(allocator->GetRecorder() != VMA_NULL)
16967 allocator->GetRecorder()->RecordCreateImage(
16968 allocator->GetCurrentFrameIndex(),
16970 *pAllocationCreateInfo,
16980 res = allocator->BindImageMemory(*pAllocation, *pImage);
16985 #if VMA_STATS_STRING_ENABLED 16986 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16988 if(pAllocationInfo != VMA_NULL)
16990 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16995 allocator->FreeMemory(
16998 *pAllocation = VK_NULL_HANDLE;
16999 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17000 *pImage = VK_NULL_HANDLE;
17003 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17004 *pImage = VK_NULL_HANDLE;
17015 VMA_ASSERT(allocator);
17017 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
17022 VMA_DEBUG_LOG(
"vmaDestroyImage");
17024 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17026 #if VMA_RECORDING_ENABLED 17027 if(allocator->GetRecorder() != VMA_NULL)
17029 allocator->GetRecorder()->RecordDestroyImage(
17030 allocator->GetCurrentFrameIndex(),
17035 if(image != VK_NULL_HANDLE)
17037 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
17039 if(allocation != VK_NULL_HANDLE)
17041 allocator->FreeMemory(
17047 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1764
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2064
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1814
-
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2867
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1822
+
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2875
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1788
-
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2387
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1768
+
Definition: vk_mem_alloc.h:1796
+
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2395
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1776
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:2018
-
Definition: vk_mem_alloc.h:2122
-
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2820
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1760
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2487
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1811
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2903
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2276
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1655
+
Definition: vk_mem_alloc.h:2026
+
Definition: vk_mem_alloc.h:2130
+
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2828
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1768
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2495
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1819
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2911
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2284
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1663
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2368
-
Definition: vk_mem_alloc.h:2093
-
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2823
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1749
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2175
-
Definition: vk_mem_alloc.h:2045
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1823
-
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2304
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2376
+
Definition: vk_mem_alloc.h:2101
+
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2831
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1757
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2183
+
Definition: vk_mem_alloc.h:2053
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1831
+
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2312
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1877
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1808
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1885
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1816
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2049
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2057
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1949
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1765
-
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2857
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1948
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2907
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1957
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1773
+
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2865
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1956
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2915
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1840
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1958
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2915
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2159
-
Definition: vk_mem_alloc.h:2117
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2898
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1766
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1691
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1848
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1966
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2923
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2167
+
Definition: vk_mem_alloc.h:2125
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2906
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1774
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1699
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1817
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1825
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2318
-
Definition: vk_mem_alloc.h:2312
-
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1772
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1884
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2497
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2326
+
Definition: vk_mem_alloc.h:2320
+
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1780
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1892
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2505
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1761
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1769
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1786
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2196
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2338
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2374
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1794
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2204
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2346
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2382
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1747
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2321
+
Definition: vk_mem_alloc.h:1755
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2329
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2872
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1996
+
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2880
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:2004
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2832
+
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2840
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2893
+
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2901
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2911
-
Definition: vk_mem_alloc.h:2035
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2183
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1764
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2919
+
Definition: vk_mem_alloc.h:2043
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2191
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1772
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1954
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1697
-
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2811
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1962
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1705
+
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2819
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
Definition: vk_mem_alloc.h:2809
-
Definition: vk_mem_alloc.h:2143
-
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2838
+
Definition: vk_mem_alloc.h:2817
+
Definition: vk_mem_alloc.h:2151
+
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2846
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1718
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1726
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1790
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1723
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2913
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1798
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1731
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2921
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2170
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2384
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2178
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2392
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1757
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1937
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2333
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1710
-
Definition: vk_mem_alloc.h:2308
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1765
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1945
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2341
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1718
+
Definition: vk_mem_alloc.h:2316
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2100
+
Definition: vk_mem_alloc.h:2108
Represents Opaque object that represents started defragmentation process.
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1950
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1714
-
Definition: vk_mem_alloc.h:2133
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2324
-
Definition: vk_mem_alloc.h:2044
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1763
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1958
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1722
+
Definition: vk_mem_alloc.h:2141
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2332
+
Definition: vk_mem_alloc.h:2052
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1771
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2165
-
Definition: vk_mem_alloc.h:2156
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2173
+
Definition: vk_mem_alloc.h:2164
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1940
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1759
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2346
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1826
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2377
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2154
-
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2862
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2189
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1948
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1767
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2354
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1834
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2385
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2162
+
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2870
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2197
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1865
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1956
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2080
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1949
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1873
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1964
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2088
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1957
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1770
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1796
-
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2808
-
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2886
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1712
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1769
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1778
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1804
+
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2816
+
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2894
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1720
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1777
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2360
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1762
-
Definition: vk_mem_alloc.h:2111
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2368
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1770
+
Definition: vk_mem_alloc.h:2119
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1804
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2511
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1820
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1949
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1812
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2519
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1828
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1957
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1946
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1954
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2365
-
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2817
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2373
+
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2825
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
-
Definition: vk_mem_alloc.h:2126
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2492
-
Definition: vk_mem_alloc.h:2140
-
Definition: vk_mem_alloc.h:2152
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2909
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1755
+
Definition: vk_mem_alloc.h:2134
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2500
+
Definition: vk_mem_alloc.h:2148
+
Definition: vk_mem_alloc.h:2160
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2917
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1763
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1944
-
Definition: vk_mem_alloc.h:2001
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2314
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1952
+
Definition: vk_mem_alloc.h:2009
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2322
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1793
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1942
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1767
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1771
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2067
-
Definition: vk_mem_alloc.h:2147
-
Definition: vk_mem_alloc.h:2028
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2506
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1801
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1950
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1775
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1779
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2075
+
Definition: vk_mem_alloc.h:2155
+
Definition: vk_mem_alloc.h:2036
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2514
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1745
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1753
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1758
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2293
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1766
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2301
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2473
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2481
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2137
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2258
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1950
+
Definition: vk_mem_alloc.h:2145
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2266
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1958
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
-
Definition: vk_mem_alloc.h:2106
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1780
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1957
+
Definition: vk_mem_alloc.h:2114
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1788
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1965
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2371
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1950
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2379
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1958
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
-
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2877
+
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2885
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2478
-
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2841
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2486
+
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2849