Commit a56c47a6a0

This saves us a non-inlineable function call. The thread-local block is accessed anyway, since we need to get the token, so we already have the pointer and don't need to fetch it a second time (which is what happens inside Windows' GetCurrentThreadId()). We also no longer need to store the thread id in ScopedZone, as that was a micro-optimization to save us the second GetThreadHandle() call. This change has a measurable effect: it reduces enqueue time from ~10 ns to ~8 ns.

A further optimization would be to skip thread handle retrieval during zone capture entirely and do it instead when data is retrieved from the queue. Since each thread has its own producer ("token"), the thread handle should be accessible during the dequeue operation. This is a much more invasive change that would require a) modification of the queue, and b) additional processing of dequeued data to inject the thread handle.
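Below is a minimal sketch, not Tracy's actual code, of the idea the commit describes: the OS thread id is captured once and cached in the same thread-local block that already has to be touched to fetch the queue token, so zone capture reads it back instead of making another non-inlineable OS call. All names here (ThreadLocalBlock, producerToken) and the use of std::hash to derive a numeric id are illustrative assumptions.

// Illustrative sketch only -- not Tracy's implementation.
#include <cstdint>
#include <functional>
#include <thread>

namespace sketch
{

// The producer token and the cached thread id live in one per-thread block,
// so fetching one makes the other free.
struct ThreadLocalBlock
{
    uint64_t threadHandle;   // captured once per thread (e.g. GetCurrentThreadId() / pthread_self())
    void*    producerToken;  // hypothetical placeholder for the queue's producer token
};

inline ThreadLocalBlock& GetThreadLocalBlock()
{
    // Lazily initialized; the expensive call happens exactly once per thread.
    thread_local ThreadLocalBlock block{
        static_cast<uint64_t>( std::hash<std::thread::id>{}( std::this_thread::get_id() ) ),
        nullptr };
    return block;
}

// What zone capture does afterwards: no OS call, just a load from the block
// it already needed in order to get the token.
inline uint64_t GetThreadHandle() { return GetThreadLocalBlock().threadHandle; }

}

The TracyScoped.hpp header at this commit follows.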
#ifndef __TRACYSCOPED_HPP__
#define __TRACYSCOPED_HPP__

#include <stdint.h>
#include <string.h>

#include "../common/TracySystem.hpp"
#include "../common/TracyAlign.hpp"
#include "../common/TracyAlloc.hpp"
#include "TracyProfiler.hpp"

namespace tracy
{
class ScopedZone
{
public:
    // Queues a ZoneBegin event; does nothing if the zone is inactive.
    tracy_force_inline ScopedZone( const SourceLocationData* srcloc, bool is_active = true )
#ifdef TRACY_ON_DEMAND
        : m_active( is_active && GetProfiler().IsConnected() )
        , m_connectionId( GetProfiler().ConnectionId() )
#else
        : m_active( is_active )
#endif
    {
        if( !m_active ) return;
        Magic magic;
        const auto thread = GetThreadHandle();
        auto token = GetToken();
        auto& tail = token->get_tail_index();
        auto item = token->enqueue_begin<tracy::moodycamel::CanAlloc>( magic );
        MemWrite( &item->hdr.type, QueueType::ZoneBegin );
#ifdef TRACY_RDTSCP_OPT
        MemWrite( &item->zoneBegin.time, Profiler::GetTime( item->zoneBegin.cpu ) );
#else
        uint32_t cpu;
        MemWrite( &item->zoneBegin.time, Profiler::GetTime( cpu ) );
        MemWrite( &item->zoneBegin.cpu, cpu );
#endif
        MemWrite( &item->zoneBegin.thread, thread );
        MemWrite( &item->zoneBegin.srcloc, (uint64_t)srcloc );
        tail.store( magic + 1, std::memory_order_release );
    }

    // Same as above, but also captures a callstack of the given depth (ZoneBeginCallstack).
    tracy_force_inline ScopedZone( const SourceLocationData* srcloc, int depth, bool is_active = true )
#ifdef TRACY_ON_DEMAND
        : m_active( is_active && GetProfiler().IsConnected() )
        , m_connectionId( GetProfiler().ConnectionId() )
#else
        : m_active( is_active )
#endif
    {
        if( !m_active ) return;
        const auto thread = GetThreadHandle();
        Magic magic;
        auto token = GetToken();
        auto& tail = token->get_tail_index();
        auto item = token->enqueue_begin<tracy::moodycamel::CanAlloc>( magic );
        MemWrite( &item->hdr.type, QueueType::ZoneBeginCallstack );
#ifdef TRACY_RDTSCP_OPT
        MemWrite( &item->zoneBegin.time, Profiler::GetTime( item->zoneBegin.cpu ) );
#else
        uint32_t cpu;
        MemWrite( &item->zoneBegin.time, Profiler::GetTime( cpu ) );
        MemWrite( &item->zoneBegin.cpu, cpu );
#endif
        MemWrite( &item->zoneBegin.thread, thread );
        MemWrite( &item->zoneBegin.srcloc, (uint64_t)srcloc );
        tail.store( magic + 1, std::memory_order_release );

        GetProfiler().SendCallstack( depth, thread );
    }

    // Queues the matching ZoneEnd event. With TRACY_ON_DEMAND, zones started
    // during a previous connection are dropped.
    tracy_force_inline ~ScopedZone()
    {
        if( !m_active ) return;
#ifdef TRACY_ON_DEMAND
        if( GetProfiler().ConnectionId() != m_connectionId ) return;
#endif
        Magic magic;
        const auto thread = GetThreadHandle();
        auto token = GetToken();
        auto& tail = token->get_tail_index();
        auto item = token->enqueue_begin<tracy::moodycamel::CanAlloc>( magic );
        MemWrite( &item->hdr.type, QueueType::ZoneEnd );
#ifdef TRACY_RDTSCP_OPT
        MemWrite( &item->zoneEnd.time, Profiler::GetTime( item->zoneEnd.cpu ) );
#else
        uint32_t cpu;
        MemWrite( &item->zoneEnd.time, Profiler::GetTime( cpu ) );
        MemWrite( &item->zoneEnd.cpu, cpu );
#endif
        MemWrite( &item->zoneEnd.thread, thread );
        tail.store( magic + 1, std::memory_order_release );
    }

    // Attaches a text annotation to the zone; the string is copied into profiler-owned memory.
    tracy_force_inline void Text( const char* txt, size_t size )
    {
        if( !m_active ) return;
#ifdef TRACY_ON_DEMAND
        if( GetProfiler().ConnectionId() != m_connectionId ) return;
#endif
        Magic magic;
        const auto thread = GetThreadHandle();
        auto token = GetToken();
        auto ptr = (char*)tracy_malloc( size+1 );
        memcpy( ptr, txt, size );
        ptr[size] = '\0';
        auto& tail = token->get_tail_index();
        auto item = token->enqueue_begin<tracy::moodycamel::CanAlloc>( magic );
        MemWrite( &item->hdr.type, QueueType::ZoneText );
        MemWrite( &item->zoneText.thread, thread );
        MemWrite( &item->zoneText.text, (uint64_t)ptr );
        tail.store( magic + 1, std::memory_order_release );
    }

    // Overrides the zone's displayed name; the string is copied as in Text().
    tracy_force_inline void Name( const char* txt, size_t size )
    {
        if( !m_active ) return;
#ifdef TRACY_ON_DEMAND
        if( GetProfiler().ConnectionId() != m_connectionId ) return;
#endif
        Magic magic;
        const auto thread = GetThreadHandle();
        auto token = GetToken();
        auto ptr = (char*)tracy_malloc( size+1 );
        memcpy( ptr, txt, size );
        ptr[size] = '\0';
        auto& tail = token->get_tail_index();
        auto item = token->enqueue_begin<tracy::moodycamel::CanAlloc>( magic );
        MemWrite( &item->hdr.type, QueueType::ZoneName );
        MemWrite( &item->zoneText.thread, thread );
        MemWrite( &item->zoneText.text, (uint64_t)ptr );
        tail.store( magic + 1, std::memory_order_release );
    }

private:
    const bool m_active;

#ifdef TRACY_ON_DEMAND
    uint64_t m_connectionId;
#endif
};

}

#endif
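
For context, a hedged sketch of how ScopedZone is typically driven; Tracy's ZoneScoped-family macros expand to roughly this shape. The function name, the annotation text, and the SourceLocationData field order (name, function, file, line, color) are assumptions for illustration, not taken from this header.

#include "TracyScoped.hpp"

void UpdateFrame()   // hypothetical function being profiled
{
    // A static source-location descriptor plus a stack-allocated ScopedZone:
    // the constructor queues ZoneBegin, the destructor queues ZoneEnd.
    static const tracy::SourceLocationData srcloc
        { nullptr, __FUNCTION__, __FILE__, (uint32_t)__LINE__, 0 };
    tracy::ScopedZone zone( &srcloc );

    zone.Text( "frame work", 10 );   // optional annotation attached to this zone

    // ... code being profiled ...
}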