tracy/TracyVulkan.hpp

#ifndef __TRACYVULKAN_HPP__
#define __TRACYVULKAN_HPP__

#if !defined TRACY_ENABLE

#define TracyVkContext(x,y,z,w) nullptr
#define TracyVkContextCalibrated(x,y,z,w,a,b) nullptr
#define TracyVkDestroy(x)
#define TracyVkNamedZone(c,x,y,z,w)
#define TracyVkNamedZoneC(c,x,y,z,w,a)
#define TracyVkZone(c,x,y)
#define TracyVkZoneC(c,x,y,z)
#define TracyVkZoneTransient(c,x,y,z,w)
#define TracyVkCollect(c,x)

#define TracyVkNamedZoneS(c,x,y,z,w,a)
#define TracyVkNamedZoneCS(c,x,y,z,w,v,a)
#define TracyVkZoneS(c,x,y,z)
#define TracyVkZoneCS(c,x,y,z,w)
#define TracyVkZoneTransientS(c,x,y,z,w,a)

namespace tracy
{
class VkCtxScope {};
}

using TracyVkCtx = void*;

#else

#include <assert.h>
#include <stdlib.h>

#include <vulkan/vulkan.h>

#include "Tracy.hpp"
#include "client/TracyProfiler.hpp"
#include "client/TracyCallstack.hpp"

namespace tracy
{
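
// VkCtx represents a single Vulkan GPU profiling context. It owns a timestamp
// query pool used as a ring buffer: VkCtxScope instances write begin/end
// timestamps into consecutive query slots, and Collect() later reads the
// results back and forwards them to the profiler.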
class VkCtx
{
    friend class VkCtxScope;

    enum { QueryCount = 64 * 1024 };

public:
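    // If the VK_EXT_calibrated_timestamps entry points are provided, the
    // constructor looks for a host time domain it can calibrate against. It then
    // creates the timestamp query pool (halving its size until creation succeeds),
    // establishes an initial CPU/GPU timestamp correlation, and announces the new
    // context to the profiler.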
    VkCtx( VkPhysicalDevice physdev, VkDevice device, VkQueue queue, VkCommandBuffer cmdbuf, PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT _vkGetPhysicalDeviceCalibrateableTimeDomainsEXT, PFN_vkGetCalibratedTimestampsEXT _vkGetCalibratedTimestampsEXT )
        : m_device( device )
        , m_timeDomain( VK_TIME_DOMAIN_DEVICE_EXT )
        , m_context( GetGpuCtxCounter().fetch_add( 1, std::memory_order_relaxed ) )
        , m_head( 0 )
        , m_tail( 0 )
        , m_oldCnt( 0 )
        , m_queryCount( QueryCount )
        , m_vkGetCalibratedTimestampsEXT( _vkGetCalibratedTimestampsEXT )
    {
        assert( m_context != 255 );

        if( _vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && _vkGetCalibratedTimestampsEXT )
        {
            uint32_t num;
            _vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physdev, &num, nullptr );
            if( num > 4 ) num = 4;
            VkTimeDomainEXT data[4];
            _vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physdev, &num, data );
            for( uint32_t i=0; i<num; i++ )
            {
                // TODO VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT
                if( data[i] == VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT )
                {
                    m_timeDomain = data[i];
                    break;
                }
            }
        }
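
        // Create the query pool. If the requested size is rejected by the driver,
        // retry with progressively smaller pools. The pool is then reset once via a
        // throwaway command buffer submission before any timestamps are written.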
        VkPhysicalDeviceProperties prop;
        vkGetPhysicalDeviceProperties( physdev, &prop );
        const float period = prop.limits.timestampPeriod;

        VkQueryPoolCreateInfo poolInfo = {};
        poolInfo.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
        poolInfo.queryCount = m_queryCount;
        poolInfo.queryType = VK_QUERY_TYPE_TIMESTAMP;
        while( vkCreateQueryPool( device, &poolInfo, nullptr, &m_query ) != VK_SUCCESS )
        {
            m_queryCount /= 2;
            poolInfo.queryCount = m_queryCount;
        }

        VkCommandBufferBeginInfo beginInfo = {};
        beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;

        VkSubmitInfo submitInfo = {};
        submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submitInfo.commandBufferCount = 1;
        submitInfo.pCommandBuffers = &cmdbuf;

        vkBeginCommandBuffer( cmdbuf, &beginInfo );
        vkCmdResetQueryPool( cmdbuf, m_query, 0, m_queryCount );
        vkEndCommandBuffer( cmdbuf );
        vkQueueSubmit( queue, 1, &submitInfo, VK_NULL_HANDLE );
        vkQueueWaitIdle( queue );
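
        // Establish the initial CPU/GPU timestamp pair. Without calibrated
        // timestamps this is done by writing a device timestamp and sampling the
        // CPU clock once the submission has completed; with calibrated timestamps
        // both domains are sampled together, and the smallest deviation observed
        // over a number of probes (plus a 50% margin) becomes the acceptance
        // threshold used by Calibrate().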
        int64_t tcpu, tgpu;
        if( m_timeDomain == VK_TIME_DOMAIN_DEVICE_EXT )
        {
            vkBeginCommandBuffer( cmdbuf, &beginInfo );
            vkCmdWriteTimestamp( cmdbuf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, m_query, 0 );
            vkEndCommandBuffer( cmdbuf );
            vkQueueSubmit( queue, 1, &submitInfo, VK_NULL_HANDLE );
            vkQueueWaitIdle( queue );

            tcpu = Profiler::GetTime();
            vkGetQueryPoolResults( device, m_query, 0, 1, sizeof( tgpu ), &tgpu, sizeof( tgpu ), VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT );

            vkBeginCommandBuffer( cmdbuf, &beginInfo );
            vkCmdResetQueryPool( cmdbuf, m_query, 0, 1 );
            vkEndCommandBuffer( cmdbuf );
            vkQueueSubmit( queue, 1, &submitInfo, VK_NULL_HANDLE );
            vkQueueWaitIdle( queue );
        }
        else
        {
            enum { NumProbes = 32 };

            VkCalibratedTimestampInfoEXT spec[2] = {
                { VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, nullptr, VK_TIME_DOMAIN_DEVICE_EXT },
                { VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, nullptr, m_timeDomain },
            };
            uint64_t ts[2];
            uint64_t deviation[NumProbes];
            for( int i=0; i<NumProbes; i++ )
            {
                _vkGetCalibratedTimestampsEXT( device, 2, spec, ts, deviation+i );
            }
            uint64_t minDeviation = deviation[0];
            for( int i=1; i<NumProbes; i++ )
            {
                if( minDeviation > deviation[i] )
                {
                    minDeviation = deviation[i];
                }
            }
            m_deviation = minDeviation * 3 / 2;

            m_qpcToNs = int64_t( 1000000000. / GetFrequencyQpc() );

            Calibrate( device, m_prevCalibration, tgpu );
            tcpu = Profiler::GetTime();
        }
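
        // Announce the new GPU context to the profiler. The flags mark whether the
        // context supports calibrated (drift-corrected) timestamps.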
        uint8_t flags = 0;
        if( m_timeDomain != VK_TIME_DOMAIN_DEVICE_EXT ) flags |= GpuContextCalibration;

        auto item = Profiler::QueueSerial();
        MemWrite( &item->hdr.type, QueueType::GpuNewContext );
        MemWrite( &item->gpuNewContext.cpuTime, tcpu );
        MemWrite( &item->gpuNewContext.gpuTime, tgpu );
        memset( &item->gpuNewContext.thread, 0, sizeof( item->gpuNewContext.thread ) );
        MemWrite( &item->gpuNewContext.period, period );
        MemWrite( &item->gpuNewContext.context, m_context );
        MemWrite( &item->gpuNewContext.flags, flags );
        MemWrite( &item->gpuNewContext.type, GpuContextType::Vulkan );

#ifdef TRACY_ON_DEMAND
        GetProfiler().DeferItem( *item );
#endif
        Profiler::QueueSerialFinish();

        m_res = (int64_t*)tracy_malloc( sizeof( int64_t ) * m_queryCount );
    }

    ~VkCtx()
    {
        tracy_free( m_res );
        vkDestroyQueryPool( m_device, m_query, nullptr );
    }
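
    // Collect() should be called periodically (typically once per frame) from a
    // command buffer that will be submitted to the profiled queue. It reads back
    // any timestamp queries that have completed, sends them to the profiler, and
    // recycles the consumed portion of the query ring buffer.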
    void Collect( VkCommandBuffer cmdbuf )
    {
        ZoneScopedC( Color::Red4 );

        if( m_tail == m_head ) return;

#ifdef TRACY_ON_DEMAND
        if( !GetProfiler().IsConnected() )
        {
            vkCmdResetQueryPool( cmdbuf, m_query, 0, m_queryCount );
            m_head = m_tail = 0;
            int64_t tgpu;
            if( m_timeDomain != VK_TIME_DOMAIN_DEVICE_EXT ) Calibrate( m_device, m_prevCalibration, tgpu );
            return;
        }
#endif

        unsigned int cnt;
        if( m_oldCnt != 0 )
        {
            cnt = m_oldCnt;
            m_oldCnt = 0;
        }
        else
        {
            cnt = m_head < m_tail ? m_queryCount - m_tail : m_head - m_tail;
        }

        if( vkGetQueryPoolResults( m_device, m_query, m_tail, cnt, sizeof( int64_t ) * m_queryCount, m_res, sizeof( int64_t ), VK_QUERY_RESULT_64_BIT ) == VK_NOT_READY )
        {
            m_oldCnt = cnt;
            return;
        }
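
        // Each result is forwarded together with its query id so the profiler can
        // match the GPU timestamp to the zone begin/end event that issued it.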
        for( unsigned int idx=0; idx<cnt; idx++ )
        {
            auto item = Profiler::QueueSerial();
            MemWrite( &item->hdr.type, QueueType::GpuTime );
            MemWrite( &item->gpuTime.gpuTime, m_res[idx] );
            MemWrite( &item->gpuTime.queryId, uint16_t( m_tail + idx ) );
            MemWrite( &item->gpuTime.context, m_context );
            Profiler::QueueSerialFinish();
        }

        if( m_timeDomain != VK_TIME_DOMAIN_DEVICE_EXT )
        {
            int64_t tgpu, tcpu;
            Calibrate( m_device, tcpu, tgpu );
            const auto refCpu = Profiler::GetTime();
            const auto delta = tcpu - m_prevCalibration;
            if( delta > 0 )
            {
                m_prevCalibration = tcpu;
                auto item = Profiler::QueueSerial();
                MemWrite( &item->hdr.type, QueueType::GpuCalibration );
                MemWrite( &item->gpuCalibration.gpuTime, tgpu );
                MemWrite( &item->gpuCalibration.cpuTime, refCpu );
                MemWrite( &item->gpuCalibration.cpuDelta, delta );
                MemWrite( &item->gpuCalibration.context, m_context );
                Profiler::QueueSerialFinish();
            }
        }

        vkCmdResetQueryPool( cmdbuf, m_query, m_tail, cnt );

        m_tail += cnt;
        if( m_tail == m_queryCount ) m_tail = 0;
    }

private:
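    // Query ids are handed out from a ring buffer. The assert fires if more zones
    // are begun than the pool can hold before Collect() has a chance to drain it.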
    tracy_force_inline unsigned int NextQueryId()
    {
        const auto id = m_head;
        m_head = ( m_head + 1 ) % m_queryCount;
        assert( m_head != m_tail );
        return id;
    }

    tracy_force_inline uint8_t GetId() const
    {
        return m_context;
    }
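
    // Calibrate() samples the device and host time domains together via
    // vkGetCalibratedTimestampsEXT, retrying until the reported deviation falls
    // within the threshold measured in the constructor. Only the Windows QPC host
    // domain is currently handled; other host domains are not implemented.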
    tracy_force_inline void Calibrate( VkDevice device, int64_t& tCpu, int64_t& tGpu )
    {
        assert( m_timeDomain != VK_TIME_DOMAIN_DEVICE_EXT );
        VkCalibratedTimestampInfoEXT spec[2] = {
            { VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, nullptr, VK_TIME_DOMAIN_DEVICE_EXT },
            { VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, nullptr, m_timeDomain },
        };
        uint64_t ts[2];
        uint64_t deviation;
        do
        {
            m_vkGetCalibratedTimestampsEXT( device, 2, spec, ts, &deviation );
        }
        while( deviation > m_deviation );

#if defined _WIN32 || defined __CYGWIN__
        tGpu = ts[0];
        tCpu = ts[1] * m_qpcToNs;
#else
        assert( false );
#endif
    }

    VkDevice m_device;
    VkQueryPool m_query;
    VkTimeDomainEXT m_timeDomain;
    uint64_t m_deviation;
    int64_t m_qpcToNs;
    int64_t m_prevCalibration;
    uint8_t m_context;

    unsigned int m_head;
    unsigned int m_tail;
    unsigned int m_oldCnt;
    unsigned int m_queryCount;

    int64_t* m_res;

    PFN_vkGetCalibratedTimestampsEXT m_vkGetCalibratedTimestampsEXT;
};
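
// VkCtxScope is the RAII helper behind the TracyVkZone* macros. Its constructor
// writes a begin timestamp into the context's query pool and queues the matching
// CPU-side zone-begin event; the destructor does the same for the zone end.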
class VkCtxScope
{
public:
    tracy_force_inline VkCtxScope( VkCtx* ctx, const SourceLocationData* srcloc, VkCommandBuffer cmdbuf, bool is_active )
#ifdef TRACY_ON_DEMAND
        : m_active( is_active && GetProfiler().IsConnected() )
#else
        : m_active( is_active )
#endif
    {
        if( !m_active ) return;
        m_cmdbuf = cmdbuf;
        m_ctx = ctx;

        const auto queryId = ctx->NextQueryId();
        vkCmdWriteTimestamp( cmdbuf, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, ctx->m_query, queryId );

        auto item = Profiler::QueueSerial();
        MemWrite( &item->hdr.type, QueueType::GpuZoneBeginSerial );
        MemWrite( &item->gpuZoneBegin.cpuTime, Profiler::GetTime() );
        MemWrite( &item->gpuZoneBegin.srcloc, (uint64_t)srcloc );
        MemWrite( &item->gpuZoneBegin.thread, GetThreadHandle() );
        MemWrite( &item->gpuZoneBegin.queryId, uint16_t( queryId ) );
        MemWrite( &item->gpuZoneBegin.context, ctx->GetId() );
        Profiler::QueueSerialFinish();
    }
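
    // The remaining constructors are variants of the one above: the `depth`
    // overloads additionally capture a CPU callstack, and the overloads taking
    // source/function/name strings allocate the source location at runtime
    // (used by the transient zone macros).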
    tracy_force_inline VkCtxScope( VkCtx* ctx, const SourceLocationData* srcloc, VkCommandBuffer cmdbuf, int depth, bool is_active )
#ifdef TRACY_ON_DEMAND
        : m_active( is_active && GetProfiler().IsConnected() )
#else
        : m_active( is_active )
#endif
    {
        if( !m_active ) return;
        m_cmdbuf = cmdbuf;
        m_ctx = ctx;

        const auto queryId = ctx->NextQueryId();
        vkCmdWriteTimestamp( cmdbuf, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, ctx->m_query, queryId );

        auto item = Profiler::QueueSerialCallstack( Callstack( depth ) );
        MemWrite( &item->hdr.type, QueueType::GpuZoneBeginCallstackSerial );
        MemWrite( &item->gpuZoneBegin.cpuTime, Profiler::GetTime() );
        MemWrite( &item->gpuZoneBegin.srcloc, (uint64_t)srcloc );
        MemWrite( &item->gpuZoneBegin.thread, GetThreadHandle() );
        MemWrite( &item->gpuZoneBegin.queryId, uint16_t( queryId ) );
        MemWrite( &item->gpuZoneBegin.context, ctx->GetId() );
        Profiler::QueueSerialFinish();
    }

    tracy_force_inline VkCtxScope( VkCtx* ctx, uint32_t line, const char* source, size_t sourceSz, const char* function, size_t functionSz, const char* name, size_t nameSz, VkCommandBuffer cmdbuf, bool is_active )
#ifdef TRACY_ON_DEMAND
        : m_active( is_active && GetProfiler().IsConnected() )
#else
        : m_active( is_active )
#endif
    {
        if( !m_active ) return;
        m_cmdbuf = cmdbuf;
        m_ctx = ctx;

        const auto queryId = ctx->NextQueryId();
        vkCmdWriteTimestamp( cmdbuf, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, ctx->m_query, queryId );

        const auto srcloc = Profiler::AllocSourceLocation( line, source, sourceSz, function, functionSz, name, nameSz );
        auto item = Profiler::QueueSerial();
        MemWrite( &item->hdr.type, QueueType::GpuZoneBeginAllocSrcLocSerial );
        MemWrite( &item->gpuZoneBegin.cpuTime, Profiler::GetTime() );
        MemWrite( &item->gpuZoneBegin.srcloc, srcloc );
        MemWrite( &item->gpuZoneBegin.thread, GetThreadHandle() );
        MemWrite( &item->gpuZoneBegin.queryId, uint16_t( queryId ) );
        MemWrite( &item->gpuZoneBegin.context, ctx->GetId() );
        Profiler::QueueSerialFinish();
    }

    tracy_force_inline VkCtxScope( VkCtx* ctx, uint32_t line, const char* source, size_t sourceSz, const char* function, size_t functionSz, const char* name, size_t nameSz, VkCommandBuffer cmdbuf, int depth, bool is_active )
#ifdef TRACY_ON_DEMAND
        : m_active( is_active && GetProfiler().IsConnected() )
#else
        : m_active( is_active )
#endif
    {
        if( !m_active ) return;
        m_cmdbuf = cmdbuf;
        m_ctx = ctx;

        const auto queryId = ctx->NextQueryId();
        vkCmdWriteTimestamp( cmdbuf, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, ctx->m_query, queryId );

        const auto srcloc = Profiler::AllocSourceLocation( line, source, sourceSz, function, functionSz, name, nameSz );
        auto item = Profiler::QueueSerialCallstack( Callstack( depth ) );
        MemWrite( &item->hdr.type, QueueType::GpuZoneBeginAllocSrcLocCallstackSerial );
        MemWrite( &item->gpuZoneBegin.cpuTime, Profiler::GetTime() );
        MemWrite( &item->gpuZoneBegin.srcloc, srcloc );
        MemWrite( &item->gpuZoneBegin.thread, GetThreadHandle() );
        MemWrite( &item->gpuZoneBegin.queryId, uint16_t( queryId ) );
        MemWrite( &item->gpuZoneBegin.context, ctx->GetId() );
        Profiler::QueueSerialFinish();
    }
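
    // The destructor closes the zone: it writes the end timestamp into the next
    // query slot and queues the matching zone-end event for the profiler.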
    tracy_force_inline ~VkCtxScope()
    {
        if( !m_active ) return;

        const auto queryId = m_ctx->NextQueryId();
        vkCmdWriteTimestamp( m_cmdbuf, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, m_ctx->m_query, queryId );

        auto item = Profiler::QueueSerial();
        MemWrite( &item->hdr.type, QueueType::GpuZoneEndSerial );
        MemWrite( &item->gpuZoneEnd.cpuTime, Profiler::GetTime() );
        MemWrite( &item->gpuZoneEnd.thread, GetThreadHandle() );
        MemWrite( &item->gpuZoneEnd.queryId, uint16_t( queryId ) );
        MemWrite( &item->gpuZoneEnd.context, m_ctx->GetId() );
        Profiler::QueueSerialFinish();
    }

private:
    const bool m_active;

    VkCommandBuffer m_cmdbuf;
    VkCtx* m_ctx;
};
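
// CreateVkContext()/DestroyVkContext() back the TracyVkContext and TracyVkDestroy
// macros below. The context object is placement-new'ed into memory obtained from
// tracy_malloc, the profiler's internal allocator.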
static inline VkCtx* CreateVkContext( VkPhysicalDevice physdev, VkDevice device, VkQueue queue, VkCommandBuffer cmdbuf, PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT gpdctd, PFN_vkGetCalibratedTimestampsEXT gct )
{
    InitRPMallocThread();
    auto ctx = (VkCtx*)tracy_malloc( sizeof( VkCtx ) );
    new(ctx) VkCtx( physdev, device, queue, cmdbuf, gpdctd, gct );
    return ctx;
}

static inline void DestroyVkContext( VkCtx* ctx )
{
    ctx->~VkCtx();
    tracy_free( ctx );
}

}
using TracyVkCtx = tracy::VkCtx*;
#define TracyVkContext( physdev, device, queue, cmdbuf ) tracy::CreateVkContext( physdev, device, queue, cmdbuf, nullptr, nullptr );
#define TracyVkContextCalibrated( physdev, device, queue, cmdbuf, gpdctd, gct ) tracy::CreateVkContext( physdev, device, queue, cmdbuf, gpdctd, gct );
#define TracyVkDestroy( ctx ) tracy::DestroyVkContext( ctx );
#if defined TRACY_HAS_CALLSTACK && defined TRACY_CALLSTACK
# define TracyVkNamedZone( ctx, varname, cmdbuf, name, active ) static constexpr tracy::SourceLocationData TracyConcat(__tracy_gpu_source_location,__LINE__) { name, __FUNCTION__, __FILE__, (uint32_t)__LINE__, 0 }; tracy::VkCtxScope varname( ctx, &TracyConcat(__tracy_gpu_source_location,__LINE__), cmdbuf, TRACY_CALLSTACK, active );
# define TracyVkNamedZoneC( ctx, varname, cmdbuf, name, color, active ) static constexpr tracy::SourceLocationData TracyConcat(__tracy_gpu_source_location,__LINE__) { name, __FUNCTION__, __FILE__, (uint32_t)__LINE__, color }; tracy::VkCtxScope varname( ctx, &TracyConcat(__tracy_gpu_source_location,__LINE__), cmdbuf, TRACY_CALLSTACK, active );
# define TracyVkZone( ctx, cmdbuf, name ) TracyVkNamedZoneS( ctx, ___tracy_gpu_zone, cmdbuf, name, TRACY_CALLSTACK, true )
# define TracyVkZoneC( ctx, cmdbuf, name, color ) TracyVkNamedZoneCS( ctx, ___tracy_gpu_zone, cmdbuf, name, color, TRACY_CALLSTACK, true )
# define TracyVkZoneTransient( ctx, varname, cmdbuf, name, active ) TracyVkZoneTransientS( ctx, varname, cmdbuf, name, TRACY_CALLSTACK, active )
#else
# define TracyVkNamedZone( ctx, varname, cmdbuf, name, active ) static constexpr tracy::SourceLocationData TracyConcat(__tracy_gpu_source_location,__LINE__) { name, __FUNCTION__, __FILE__, (uint32_t)__LINE__, 0 }; tracy::VkCtxScope varname( ctx, &TracyConcat(__tracy_gpu_source_location,__LINE__), cmdbuf, active );
# define TracyVkNamedZoneC( ctx, varname, cmdbuf, name, color, active ) static constexpr tracy::SourceLocationData TracyConcat(__tracy_gpu_source_location,__LINE__) { name, __FUNCTION__, __FILE__, (uint32_t)__LINE__, color }; tracy::VkCtxScope varname( ctx, &TracyConcat(__tracy_gpu_source_location,__LINE__), cmdbuf, active );
# define TracyVkZone( ctx, cmdbuf, name ) TracyVkNamedZone( ctx, ___tracy_gpu_zone, cmdbuf, name, true )
# define TracyVkZoneC( ctx, cmdbuf, name, color ) TracyVkNamedZoneC( ctx, ___tracy_gpu_zone, cmdbuf, name, color, true )
# define TracyVkZoneTransient( ctx, varname, cmdbuf, name, active ) tracy::VkCtxScope varname( ctx, __LINE__, __FILE__, strlen( __FILE__ ), __FUNCTION__, strlen( __FUNCTION__ ), name, strlen( name ), cmdbuf, active );
#endif
#define TracyVkCollect( ctx, cmdbuf ) ctx->Collect( cmdbuf );
#ifdef TRACY_HAS_CALLSTACK
# define TracyVkNamedZoneS( ctx, varname, cmdbuf, name, depth, active ) static constexpr tracy::SourceLocationData TracyConcat(__tracy_gpu_source_location,__LINE__) { name, __FUNCTION__, __FILE__, (uint32_t)__LINE__, 0 }; tracy::VkCtxScope varname( ctx, &TracyConcat(__tracy_gpu_source_location,__LINE__), cmdbuf, depth, active );
# define TracyVkNamedZoneCS( ctx, varname, cmdbuf, name, color, depth, active ) static constexpr tracy::SourceLocationData TracyConcat(__tracy_gpu_source_location,__LINE__) { name, __FUNCTION__, __FILE__, (uint32_t)__LINE__, color }; tracy::VkCtxScope varname( ctx, &TracyConcat(__tracy_gpu_source_location,__LINE__), cmdbuf, depth, active );
# define TracyVkZoneS( ctx, cmdbuf, name, depth ) TracyVkNamedZoneS( ctx, ___tracy_gpu_zone, cmdbuf, name, depth, true )
# define TracyVkZoneCS( ctx, cmdbuf, name, color, depth ) TracyVkNamedZoneCS( ctx, ___tracy_gpu_zone, cmdbuf, name, color, depth, true )
# define TracyVkZoneTransientS( ctx, varname, cmdbuf, name, depth, active ) tracy::VkCtxScope varname( ctx, __LINE__, __FILE__, strlen( __FILE__ ), __FUNCTION__, strlen( __FUNCTION__ ), name, strlen( name ), cmdbuf, depth, active );
#else
# define TracyVkNamedZoneS( ctx, varname, cmdbuf, name, depth, active ) TracyVkNamedZone( ctx, varname, cmdbuf, name, active )
# define TracyVkNamedZoneCS( ctx, varname, cmdbuf, name, color, depth, active ) TracyVkNamedZoneC( ctx, varname, cmdbuf, name, color, active )
# define TracyVkZoneS( ctx, cmdbuf, name, depth ) TracyVkZone( ctx, cmdbuf, name )
# define TracyVkZoneCS( ctx, cmdbuf, name, color, depth ) TracyVkZoneC( ctx, cmdbuf, name, color )
# define TracyVkZoneTransientS( ctx, varname, cmdbuf, name, depth, active ) TracyVkZoneTransient( ctx, varname, cmdbuf, name, active )
#endif
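
// A minimal usage sketch of the macros above (not part of the header's API).
// `physdev`, `device`, `queue` and `cmdbuf` are assumed to be Vulkan handles
// created by the application; the command buffer handed to the zone and collect
// macros must be in the recording state at that point.
//
//     TracyVkCtx tracyCtx = TracyVkContext( physdev, device, queue, cmdbuf );
//
//     // In a command buffer recorded every frame:
//     {
//         TracyVkZone( tracyCtx, cmdbuf, "Render" );
//         vkCmdDraw( cmdbuf, 3, 1, 0, 0 );
//     }
//
//     // Once per frame, in a command buffer that will be submitted to the queue:
//     TracyVkCollect( tracyCtx, cmdbuf );
//
//     // Before the Vulkan device is destroyed:
//     TracyVkDestroy( tracyCtx );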
#endif
#endif