#ifdef _MSC_VER
#  pragma warning( disable: 4244 4267 ) // conversion from don't care to whatever, possible loss of data
#endif

#ifdef _WIN32
#  include <malloc.h>
#else
#  include <alloca.h>
#endif

#include <cctype>
#include <chrono>
#include <math.h>
#include <string.h>

#ifdef __MINGW32__
#  define __STDC_FORMAT_MACROS
#endif
#include <inttypes.h>
#include <sys/stat.h>

#include <capstone.h>

#define ZDICT_STATIC_LINKING_ONLY
#include "../zstd/zdict.h"

#include "../public/common/TracyProtocol.hpp"
#include "../public/common/TracySystem.hpp"
#include "../public/common/TracyYield.hpp"
#include "../public/common/TracyStackFrames.hpp"
#include "TracyFileRead.hpp"
#include "TracyFileWrite.hpp"
#include "TracySort.hpp"
#include "TracyTaskDispatch.hpp"
#include "TracyVersion.hpp"
#include "TracyWorker.hpp"

namespace tracy
{
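// Pack a file index and line number into a single 64-bit value, and unpack it again.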
static tracy_force_inline uint64_t PackFileLine( uint32_t fileIdx, uint32_t line )
{
    return ( uint64_t( fileIdx ) << 32 ) | line;
}

static tracy_force_inline uint32_t UnpackFileLine( uint64_t packed, uint32_t& line )
{
    line = packed & 0xFFFFFFFF;
    return packed >> 32;
}

static bool SourceFileValid( const char* fn, uint64_t olderThan )
{
    struct stat buf;
    if( stat( fn, &buf ) == 0 && ( buf.st_mode & S_IFREG ) != 0 )
    {
        return (uint64_t)buf.st_mtime < olderThan;
    }
    return false;
}


static const uint8_t FileHeader[8] { 't', 'r', 'a', 'c', 'y', Version::Major, Version::Minor, Version::Patch };
enum { FileHeaderMagic = 5 };
static const int CurrentVersion = FileVersion( Version::Major, Version::Minor, Version::Patch );
static const int MinSupportedVersion = FileVersion( 0, 7, 0 );

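// Replay the lock event timeline from the given position, refreshing the owning thread,
// lock count and wait list cached on each event, and marking the lock as contended when
// a thread waits while the lock is held.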
static void UpdateLockCountLockable( LockMap& lockmap, size_t pos )
{
    auto& timeline = lockmap.timeline;
    bool isContended = lockmap.isContended;
    uint8_t lockingThread;
    uint8_t lockCount;
    uint64_t waitList;

    if( pos == 0 )
    {
        lockingThread = 0;
        lockCount = 0;
        waitList = 0;
    }
    else
    {
        const auto& tl = timeline[pos-1];
        lockingThread = tl.lockingThread;
        lockCount = tl.lockCount;
        waitList = tl.waitList;
    }
    const auto end = timeline.size();

    while( pos != end )
    {
        auto& tl = timeline[pos];
        const auto tbit = uint64_t( 1 ) << tl.ptr->thread;
        switch( (LockEvent::Type)tl.ptr->type )
        {
        case LockEvent::Type::Wait:
            waitList |= tbit;
            break;
        case LockEvent::Type::Obtain:
            assert( lockCount < std::numeric_limits<uint8_t>::max() );
            assert( ( waitList & tbit ) != 0 );
            waitList &= ~tbit;
            lockingThread = tl.ptr->thread;
            lockCount++;
            break;
        case LockEvent::Type::Release:
            assert( lockCount > 0 );
            lockCount--;
            break;
        default:
            break;
        }
        tl.lockingThread = lockingThread;
        tl.waitList = waitList;
        tl.lockCount = lockCount;
        if( !isContended ) isContended = lockCount != 0 && waitList != 0;
        pos++;
    }

    lockmap.isContended = isContended;
}

static void UpdateLockCountSharedLockable( LockMap& lockmap, size_t pos )
{
    auto& timeline = lockmap.timeline;
    bool isContended = lockmap.isContended;
    uint8_t lockingThread;
    uint8_t lockCount;
    uint64_t waitShared;
    uint64_t waitList;
    uint64_t sharedList;

    if( pos == 0 )
    {
        lockingThread = 0;
        lockCount = 0;
        waitShared = 0;
        waitList = 0;
        sharedList = 0;
    }
    else
    {
        const auto& tl = timeline[pos-1];
        const auto tlp = (const LockEventShared*)(const LockEvent*)tl.ptr;
        lockingThread = tl.lockingThread;
        lockCount = tl.lockCount;
        waitShared = tlp->waitShared;
        waitList = tl.waitList;
        sharedList = tlp->sharedList;
    }
    const auto end = timeline.size();

    // ObtainShared and ReleaseShared should assert on lockCount == 0, but
    // due to the async retrieval of data from threads that's not possible.
    while( pos != end )
    {
        auto& tl = timeline[pos];
        const auto tlp = (LockEventShared*)(LockEvent*)tl.ptr;
        const auto tbit = uint64_t( 1 ) << tlp->thread;
        switch( (LockEvent::Type)tlp->type )
        {
        case LockEvent::Type::Wait:
            waitList |= tbit;
            break;
        case LockEvent::Type::WaitShared:
            waitShared |= tbit;
            break;
        case LockEvent::Type::Obtain:
            assert( lockCount < std::numeric_limits<uint8_t>::max() );
            assert( ( waitList & tbit ) != 0 );
            waitList &= ~tbit;
            lockingThread = tlp->thread;
            lockCount++;
            break;
        case LockEvent::Type::Release:
            assert( lockCount > 0 );
            lockCount--;
            break;
        case LockEvent::Type::ObtainShared:
            assert( ( waitShared & tbit ) != 0 );
            assert( ( sharedList & tbit ) == 0 );
            waitShared &= ~tbit;
            sharedList |= tbit;
            break;
        case LockEvent::Type::ReleaseShared:
            assert( ( sharedList & tbit ) != 0 );
            sharedList &= ~tbit;
            break;
        default:
            break;
        }
        tl.lockingThread = lockingThread;
        tlp->waitShared = waitShared;
        tl.waitList = waitList;
        tlp->sharedList = sharedList;
        tl.lockCount = lockCount;
        if( !isContended ) isContended = ( lockCount != 0 && ( waitList != 0 || waitShared != 0 ) ) || ( sharedList != 0 && waitList != 0 );
        pos++;
    }

    lockmap.isContended = isContended;
}

static inline void UpdateLockCount( LockMap& lockmap, size_t pos )
{
    if( lockmap.type == LockType::Lockable )
    {
        UpdateLockCountLockable( lockmap, pos );
    }
    else
    {
        UpdateLockCountSharedLockable( lockmap, pos );
    }
}

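// Trace timestamps are delta-encoded against a running reference time; these helpers
// write and read that encoding.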
static tracy_force_inline void WriteTimeOffset( FileWrite& f, int64_t& refTime, int64_t time )
{
    int64_t timeOffset = time - refTime;
    refTime += timeOffset;
    f.Write( &timeOffset, sizeof( timeOffset ) );
}

static tracy_force_inline int64_t ReadTimeOffset( FileRead& f, int64_t& refTime )
{
    int64_t timeOffset;
    f.Read( timeOffset );
    refTime += timeOffset;
    return refTime;
}

static tracy_force_inline void UpdateLockRange( LockMap& lockmap, const LockEvent& ev, int64_t lt )
{
    auto& range = lockmap.range[ev.thread];
    if( range.start > lt ) range.start = lt;
    if( range.end < lt ) range.end = lt;
}

template<size_t U>
static uint64_t ReadHwSampleVec( FileRead& f, SortedVector<Int48, Int48Sort>& vec, Slab<U>& slab )
{
    uint64_t sz;
    f.Read( sz );
    if( sz != 0 )
    {
        int64_t refTime = 0;
        vec.reserve_exact( sz, slab );
        for( uint64_t i=0; i<sz; i++ )
        {
            vec[i] = ReadTimeOffset( f, refTime );
        }
    }
    return sz;
}

static bool IsQueryPrio( ServerQuery type )
{
    return type < ServerQuery::ServerQueryDisconnect;
}


LoadProgress Worker::s_loadProgress;

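// Live capture: set up empty trace data and spawn the worker (Exec) and network threads.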
Worker::Worker( const char* addr, uint16_t port )
    : m_addr( addr )
    , m_port( port )
    , m_hasData( false )
    , m_stream( LZ4_createStreamDecode() )
    , m_buffer( new char[TargetFrameSize*3 + 1] )
    , m_bufferOffset( 0 )
    , m_inconsistentSamples( false )
    , m_pendingStrings( 0 )
    , m_pendingThreads( 0 )
    , m_pendingFibers( 0 )
    , m_pendingExternalNames( 0 )
    , m_pendingSourceLocation( 0 )
    , m_pendingCallstackFrames( 0 )
    , m_pendingCallstackSubframes( 0 )
    , m_pendingCodeInformation( 0 )
    , m_pendingSymbolCode( 0 )
    , m_callstackFrameStaging( nullptr )
    , m_traceVersion( CurrentVersion )
    , m_loadTime( 0 )
{
    m_data.sourceLocationExpand.push_back( 0 );
    m_data.localThreadCompress.InitZero();
    m_data.callstackPayload.push_back( nullptr );
    m_data.zoneExtra.push_back( ZoneExtra {} );
    m_data.symbolLocInline.push_back( std::numeric_limits<uint64_t>::max() );
    m_data.memory = m_slab.AllocInit<MemData>();
    m_data.memNameMap.emplace( 0, m_data.memory );

    memset( (char*)m_gpuCtxMap, 0, sizeof( m_gpuCtxMap ) );

#ifndef TRACY_NO_STATISTICS
    m_data.sourceLocationZonesReady = true;
    m_data.gpuSourceLocationZonesReady = true;
    m_data.callstackSamplesReady = true;
    m_data.ghostZonesReady = true;
    m_data.ctxUsageReady = true;
    m_data.symbolSamplesReady = true;
#endif

    m_thread = std::thread( [this] { SetThreadName( "Tracy Worker" ); Exec(); } );
    m_threadNet = std::thread( [this] { SetThreadName( "Tracy Network" ); Network(); } );
}

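// Import path: build an in-memory trace from pre-parsed timeline, message and plot events.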
Worker::Worker( const char* name, const char* program, const std::vector<ImportEventTimeline>& timeline, const std::vector<ImportEventMessages>& messages, const std::vector<ImportEventPlots>& plots, const std::unordered_map<uint64_t, std::string>& threadNames )
    : m_hasData( true )
    , m_delay( 0 )
    , m_resolution( 0 )
    , m_captureName( name )
    , m_captureProgram( program )
    , m_captureTime( 0 )
    , m_executableTime( 0 )
    , m_pid( 0 )
    , m_samplingPeriod( 0 )
    , m_stream( nullptr )
    , m_buffer( nullptr )
    , m_inconsistentSamples( false )
    , m_traceVersion( CurrentVersion )
{
    m_data.sourceLocationExpand.push_back( 0 );
    m_data.localThreadCompress.InitZero();
    m_data.callstackPayload.push_back( nullptr );
    m_data.zoneExtra.push_back( ZoneExtra {} );
    m_data.symbolLocInline.push_back( std::numeric_limits<uint64_t>::max() );
    m_data.memory = m_slab.AllocInit<MemData>();
    m_data.memNameMap.emplace( 0, m_data.memory );

    m_data.lastTime = 0;
    if( !timeline.empty() )
    {
        m_data.lastTime = timeline.back().timestamp;
    }
    if( !messages.empty() )
    {
        if( m_data.lastTime < (int64_t)messages.back().timestamp ) m_data.lastTime = messages.back().timestamp;
    }
    if( !plots.empty() )
    {
        for( auto& v : plots )
        {
            if( m_data.lastTime < v.data.back().first ) m_data.lastTime = v.data.back().first;
        }
    }

    for( auto& v : timeline )
    {
        if( !v.isEnd )
        {
            SourceLocation srcloc {{
                StringRef(),
                StringRef( StringRef::Idx, StoreString( v.name.c_str(), v.name.size() ).idx ),
                StringRef( StringRef::Idx, StoreString( v.locFile.c_str(), v.locFile.size() ).idx ),
                v.locLine,
                0
            }};
            int key;
            auto it = m_data.sourceLocationPayloadMap.find( &srcloc );
            if( it == m_data.sourceLocationPayloadMap.end() )
            {
                auto slptr = m_slab.Alloc<SourceLocation>();
                memcpy( slptr, &srcloc, sizeof( srcloc ) );
                uint32_t idx = m_data.sourceLocationPayload.size();
                m_data.sourceLocationPayloadMap.emplace( slptr, idx );
                m_data.sourceLocationPayload.push_back( slptr );
                key = -int16_t( idx + 1 );
#ifndef TRACY_NO_STATISTICS
                auto res = m_data.sourceLocationZones.emplace( key, SourceLocationZones() );
                m_data.srclocZonesLast.first = key;
                m_data.srclocZonesLast.second = &res.first->second;
#else
                auto res = m_data.sourceLocationZonesCnt.emplace( key, 0 );
                m_data.srclocCntLast.first = key;
                m_data.srclocCntLast.second = &res.first->second;
#endif
            }
            else
            {
                key = -int16_t( it->second + 1 );
            }

            auto zone = AllocZoneEvent();
            zone->SetStartSrcLoc( v.timestamp, key );
            zone->SetEnd( -1 );
            zone->SetChild( -1 );

            if( !v.text.empty() )
            {
                auto& extra = RequestZoneExtra( *zone );
                extra.text = StringIdx( StoreString( v.text.c_str(), v.text.size() ).idx );
            }

            if( m_threadCtx != v.tid )
            {
                m_threadCtx = v.tid;
                m_threadCtxData = NoticeThread( v.tid );
            }
            NewZone( zone );
        }
        else
        {
            auto td = NoticeThread( v.tid );
            if( td->zoneIdStack.empty() ) continue;
            td->zoneIdStack.pop_back();
            auto& stack = td->stack;
            auto zone = stack.back_and_pop();
            td->DecStackCount( zone->SrcLoc() );
            zone->SetEnd( v.timestamp );

#ifndef TRACY_NO_STATISTICS
            ZoneThreadData ztd;
            ztd.SetZone( zone );
            ztd.SetThread( CompressThread( v.tid ) );
            auto slz = GetSourceLocationZones( zone->SrcLoc() );
            slz->zones.push_back( ztd );
#else
            CountZoneStatistics( zone );
#endif
        }
    }

    for( auto& v : messages )
    {
        auto msg = m_slab.Alloc<MessageData>();
        msg->time = v.timestamp;
        msg->ref = StringRef( StringRef::Type::Idx, StoreString( v.message.c_str(), v.message.size() ).idx );
        msg->thread = CompressThread( v.tid );
        msg->color = 0xFFFFFFFF;
        msg->callstack.SetVal( 0 );

        if( m_threadCtx != v.tid )
        {
            m_threadCtx = v.tid;
            m_threadCtxData = nullptr;
        }
        InsertMessageData( msg );
    }

    for( auto& v : plots )
    {
        uint64_t nptr = (uint64_t)&v.name;
        auto it = m_data.strings.find( nptr );
        if( it == m_data.strings.end() )
        {
            const auto sl = StoreString( v.name.c_str(), v.name.size() );
            m_data.strings.emplace( nptr, sl.ptr );
        }

        auto plot = m_slab.AllocInit<PlotData>();
        plot->name = nptr;
        plot->type = PlotType::User;
        plot->format = v.format;

        double sum = 0;
        double min = v.data.begin()->second;
        double max = v.data.begin()->second;
        plot->data.reserve_exact( v.data.size(), m_slab );
        size_t idx = 0;
        for( auto& p : v.data )
        {
            plot->data[idx].time.SetVal( p.first );
            plot->data[idx].val = p.second;
            idx++;
            if( min > p.second ) min = p.second;
            else if( max < p.second ) max = p.second;
            sum += p.second;
        }
        plot->min = min;
        plot->max = max;
        plot->sum = sum;

        m_data.plots.Data().push_back( plot );
    }

    for( auto& t : m_threadMap )
    {
        auto name = threadNames.find(t.first);
        if( name != threadNames.end() )
        {
            char buf[128];
            int len;
            if( t.first <= std::numeric_limits<uint32_t>::max() )
            {
                len = snprintf( buf, sizeof( buf ), "(%" PRIu64 ") %s", t.first, name->second.c_str() );
            }
            else
            {
                len = snprintf( buf, sizeof( buf ), "(PID %" PRIu64 " TID %" PRIu64 ") %s", t.first >> 32, t.first & 0xFFFFFFFF, name->second.c_str() );
            }
            AddThreadString( t.first, buf, len );
        }
        else
        {
            char buf[64];
            int len;
            if( t.first <= std::numeric_limits<uint32_t>::max() )
            {
                len = sprintf( buf, "%" PRIu64, t.first );
            }
            else
            {
                len = sprintf( buf, "PID %" PRIu64 " TID %" PRIu64, t.first >> 32, t.first & 0xFFFFFFFF );
            }
            AddThreadString( t.first, buf, len );
        }
    }

    m_data.framesBase = m_data.frames.Retrieve( 0, [this] ( uint64_t name ) {
        auto fd = m_slab.AllocInit<FrameData>();
        fd->name = name;
        fd->continuous = 1;
        return fd;
    }, [this] ( uint64_t name ) {
        assert( name == 0 );
        char tmp[6] = "Frame";
        HandleFrameName( name, tmp, 5 );
    } );

    m_data.framesBase->frames.push_back( FrameEvent{ 0, -1, -1 } );
    m_data.framesBase->frames.push_back( FrameEvent{ 0, -1, -1 } );
}

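// Load path: deserialize a saved trace, rejecting files newer than CurrentVersion or older
// than MinSupportedVersion.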
Worker::Worker( FileRead& f, EventType::Type eventMask, bool bgTasks )
    : m_hasData( true )
    , m_stream( nullptr )
    , m_buffer( nullptr )
    , m_inconsistentSamples( false )
{
    auto loadStart = std::chrono::high_resolution_clock::now();

    m_data.callstackPayload.push_back( nullptr );

    int fileVer = 0;

    uint8_t hdr[8];
    f.Read( hdr, sizeof( hdr ) );
    if( memcmp( FileHeader, hdr, FileHeaderMagic ) == 0 )
    {
        fileVer = FileVersion( hdr[FileHeaderMagic], hdr[FileHeaderMagic+1], hdr[FileHeaderMagic+2] );
        if( fileVer > CurrentVersion )
        {
            throw UnsupportedVersion( fileVer );
        }
        if( fileVer < MinSupportedVersion )
        {
            throw LegacyVersion( fileVer );
        }

        f.Read( m_delay );
    }
    else
    {
        throw LegacyVersion( FileVersion( 0, 2, 0 ) );
    }
    m_traceVersion = fileVer;

    s_loadProgress.total.store( 11, std::memory_order_relaxed );
    s_loadProgress.subTotal.store( 0, std::memory_order_relaxed );
    s_loadProgress.progress.store( LoadProgress::Initialization, std::memory_order_relaxed );
    f.Read8( m_resolution, m_timerMul, m_data.lastTime, m_data.frameOffset, m_pid, m_samplingPeriod, m_data.cpuArch, m_data.cpuId );
    f.Read( m_data.cpuManufacturer, 12 );
    m_data.cpuManufacturer[12] = '\0';

    uint64_t sz;
    {
        f.Read( sz );
        assert( sz < 1024 );
        char tmp[1024];
        f.Read( tmp, sz );
        m_captureName = std::string( tmp, tmp+sz );
        if( m_captureName.empty() ) m_captureName = f.GetFilename();
    }
    {
        f.Read( sz );
        assert( sz < 1024 );
        char tmp[1024];
        f.Read( tmp, sz );
        m_captureProgram = std::string( tmp, tmp+sz );
        f.Read( m_captureTime );
    }
    if( fileVer >= FileVersion( 0, 7, 6 ) )
    {
        f.Read( m_executableTime );
    }
    else
    {
        m_executableTime = 0;
    }
    {
        f.Read( sz );
        assert( sz < 1024 );
        char tmp[1024];
        f.Read( tmp, sz );
        m_hostInfo = std::string( tmp, tmp+sz );
    }

    f.Read( sz );
    m_data.cpuTopology.reserve( sz );
    for( uint64_t i=0; i<sz; i++ )
    {
        uint32_t packageId;
        uint64_t psz;
        f.Read2( packageId, psz );
        auto& package = *m_data.cpuTopology.emplace( packageId, unordered_flat_map<uint32_t, std::vector<uint32_t>> {} ).first;
        package.second.reserve( psz );
        for( uint64_t j=0; j<psz; j++ )
        {
            uint32_t coreId;
            uint64_t csz;
            f.Read2( coreId, csz );
            auto& core = *package.second.emplace( coreId, std::vector<uint32_t> {} ).first;
            core.second.reserve( csz );
            for( uint64_t k=0; k<csz; k++ )
            {
                uint32_t thread;
                f.Read( thread );
                core.second.emplace_back( thread );

                m_data.cpuTopologyMap.emplace( thread, CpuThreadTopology { packageId, coreId } );
            }
        }
    }

    f.Read( &m_data.crashEvent, sizeof( m_data.crashEvent ) );

    f.Read( sz );
    m_data.frames.Data().reserve_exact( sz, m_slab );
    for( uint64_t i=0; i<sz; i++ )
    {
        auto ptr = m_slab.AllocInit<FrameData>();
        uint64_t fsz;
        f.Read3( ptr->name, ptr->continuous, fsz );
        ptr->frames.reserve_exact( fsz, m_slab );
        int64_t refTime = 0;
        if( ptr->continuous )
        {
            for( uint64_t j=0; j<fsz; j++ )
            {
                ptr->frames[j].start = ReadTimeOffset( f, refTime );
                ptr->frames[j].end = -1;
                f.Read( &ptr->frames[j].frameImage, sizeof( int32_t ) );
            }
        }
        else
        {
            for( uint64_t j=0; j<fsz; j++ )
            {
                ptr->frames[j].start = ReadTimeOffset( f, refTime );
                ptr->frames[j].end = ReadTimeOffset( f, refTime );
                f.Read( &ptr->frames[j].frameImage, sizeof( int32_t ) );
            }
        }
        for( uint64_t j=0; j<fsz; j++ )
        {
            const auto timeSpan = GetFrameTime( *ptr, j );
            if( timeSpan > 0 )
            {
                ptr->min = std::min( ptr->min, timeSpan );
                ptr->max = std::max( ptr->max, timeSpan );
                ptr->total += timeSpan;
                ptr->sumSq += double( timeSpan ) * timeSpan;
            }
        }
        m_data.frames.Data()[i] = ptr;
    }
    m_data.framesBase = m_data.frames.Data()[0];
    assert( m_data.framesBase->name == 0 );

    unordered_flat_map<uint64_t, const char*> pointerMap;

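    // String table: map on-disk string pointers to freshly allocated copies, then resolve
    // string, thread name and external name references through pointerMap.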
    f.Read( sz );
    m_data.stringMap.reserve( sz );
    m_data.stringData.reserve_exact( sz, m_slab );
    for( uint64_t i=0; i<sz; i++ )
    {
        uint64_t ptr, ssz;
        f.Read2( ptr, ssz );
        auto dst = m_slab.Alloc<char>( ssz+1 );
        f.Read( dst, ssz );
        dst[ssz] = '\0';
        m_data.stringMap.emplace( charutil::StringKey { dst, ssz }, i );
        m_data.stringData[i] = ( dst );
        pointerMap.emplace( ptr, dst );
    }

    f.Read( sz );
    for( uint64_t i=0; i<sz; i++ )
    {
        uint64_t id, ptr;
        f.Read2( id, ptr );
        auto it = pointerMap.find( ptr );
        if( it != pointerMap.end() )
        {
            m_data.strings.emplace( id, it->second );
        }
    }

    f.Read( sz );
    for( uint64_t i=0; i<sz; i++ )
    {
        uint64_t id, ptr;
        f.Read2( id, ptr );
        auto it = pointerMap.find( ptr );
        if( it != pointerMap.end() )
        {
            m_data.threadNames.emplace( id, it->second );
        }
    }

    f.Read( sz );
    for( uint64_t i=0; i<sz; i++ )
    {
        uint64_t id, ptr, ptr2;
        f.Read3( id, ptr, ptr2 );
        auto it = pointerMap.find( ptr );
        auto it2 = pointerMap.find( ptr2 );
        if( it != pointerMap.end() && it2 != pointerMap.end() )
        {
            m_data.externalNames.emplace( id, std::make_pair( it->second, it2->second ) );
        }
    }

    m_data.localThreadCompress.Load( f, fileVer );
    m_data.externalThreadCompress.Load( f, fileVer );

    f.Read( sz );
    for( uint64_t i=0; i<sz; i++ )
    {
        uint64_t ptr;
        f.Read( ptr );
        SourceLocation srcloc;
        f.Read( &srcloc, sizeof( SourceLocationBase ) );
        srcloc.namehash = 0;
        m_data.sourceLocation.emplace( ptr, srcloc );
    }

    f.Read( sz );
    m_data.sourceLocationExpand.reserve_exact( sz, m_slab );
    f.Read( m_data.sourceLocationExpand.data(), sizeof( uint64_t ) * sz );
    const auto sle = sz;

    f.Read( sz );
    m_data.sourceLocationPayload.reserve_exact( sz, m_slab );
    for( uint64_t i=0; i<sz; i++ )
    {
        auto srcloc = m_slab.Alloc<SourceLocation>();
        f.Read( srcloc, sizeof( SourceLocationBase ) );
        srcloc->namehash = 0;
        m_data.sourceLocationPayload[i] = srcloc;
        m_data.sourceLocationPayloadMap.emplace( srcloc, int16_t( i ) );
    }

#ifndef TRACY_NO_STATISTICS
    m_data.sourceLocationZones.reserve( sle + sz );

    f.Read( sz );
    for( uint64_t i=0; i<sz; i++ )
    {
        int16_t id;
        uint64_t cnt;
        f.Read2( id, cnt );
        auto status = m_data.sourceLocationZones.emplace( id, SourceLocationZones() );
        assert( status.second );
        status.first->second.zones.reserve( cnt );
    }

    if( fileVer >= FileVersion( 0, 7, 15 ) )
    {
        f.Read( sz );
        for( uint64_t i=0; i<sz; i++ )
        {
            int16_t id;
            uint64_t cnt;
            f.Read2( id, cnt );
            auto status = m_data.gpuSourceLocationZones.emplace( id, GpuSourceLocationZones() );
            assert( status.second );
            status.first->second.zones.reserve( cnt );
        }
    }
#else
    f.Read( sz );
    for( uint64_t i=0; i<sz; i++ )
    {
        int16_t id;
        f.Read( id );
        f.Skip( sizeof( uint64_t ) );
        m_data.sourceLocationZonesCnt.emplace( id, 0 );
    }

    if( fileVer >= FileVersion( 0, 7, 15 ) )
    {
        f.Read( sz );
        for( uint64_t i=0; i<sz; i++ )
        {
            int16_t id;
            f.Read( id );
            f.Skip( sizeof( uint64_t ) );
            m_data.gpuSourceLocationZonesCnt.emplace( id, 0 );
        }
    }
#endif

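    // Locks: rebuild each lock map and its per-event state, or skip the data when lock
    // events are masked out.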
    s_loadProgress.progress.store( LoadProgress::Locks, std::memory_order_relaxed );
    f.Read( sz );
    if( eventMask & EventType::Locks )
    {
        s_loadProgress.subTotal.store( sz, std::memory_order_relaxed );
        for( uint64_t i=0; i<sz; i++ )
        {
            s_loadProgress.subProgress.store( i, std::memory_order_relaxed );
            auto lockmapPtr = m_slab.AllocInit<LockMap>();
            auto& lockmap = *lockmapPtr;
            uint32_t id;
            uint64_t tsz;
            f.Read8( id, lockmap.customName, lockmap.srcloc, lockmap.type, lockmap.valid, lockmap.timeAnnounce, lockmap.timeTerminate, tsz );
            lockmap.isContended = false;
            lockmap.threadMap.reserve( tsz );
            lockmap.threadList.reserve( tsz );
            for( uint64_t i=0; i<tsz; i++ )
            {
                uint64_t t;
                f.Read( t );
                lockmap.threadMap.emplace( t, i );
                lockmap.threadList.emplace_back( t );
            }
            f.Read( tsz );
            lockmap.timeline.reserve_exact( tsz, m_slab );
            auto ptr = lockmap.timeline.data();
            int64_t refTime = lockmap.timeAnnounce;
            if( lockmap.type == LockType::Lockable )
            {
                for( uint64_t i=0; i<tsz; i++ )
                {
                    auto lev = m_slab.Alloc<LockEvent>();
                    const auto lt = ReadTimeOffset( f, refTime );
                    lev->SetTime( lt );
                    int16_t srcloc;
                    f.Read( srcloc );
                    lev->SetSrcLoc( srcloc );
                    f.Read( &lev->thread, sizeof( LockEvent::thread ) + sizeof( LockEvent::type ) );
                    *ptr++ = { lev };
                    UpdateLockRange( lockmap, *lev, lt );
                }
            }
            else
            {
                for( uint64_t i=0; i<tsz; i++ )
                {
                    auto lev = m_slab.Alloc<LockEventShared>();
                    const auto lt = ReadTimeOffset( f, refTime );
                    lev->SetTime( lt );
                    int16_t srcloc;
                    f.Read( srcloc );
                    lev->SetSrcLoc( srcloc );
                    f.Read( &lev->thread, sizeof( LockEventShared::thread ) + sizeof( LockEventShared::type ) );
                    *ptr++ = { lev };
                    UpdateLockRange( lockmap, *lev, lt );
                }
            }
            UpdateLockCount( lockmap, 0 );
            m_data.lockMap.emplace( id, lockmapPtr );
        }
    }
    else
    {
        for( uint64_t i=0; i<sz; i++ )
        {
            LockType type;
            uint64_t tsz;
            f.Skip( sizeof( LockMap::customName ) + sizeof( uint32_t ) + sizeof( LockMap::srcloc ) );
            f.Read( type );
            f.Skip( sizeof( LockMap::valid ) + sizeof( LockMap::timeAnnounce ) + sizeof( LockMap::timeTerminate ) );
            f.Read( tsz );
            f.Skip( tsz * sizeof( uint64_t ) );
            f.Read( tsz );
            f.Skip( tsz * ( sizeof( int64_t ) + sizeof( int16_t ) + sizeof( LockEvent::thread ) + sizeof( LockEvent::type ) ) );
        }
    }

    s_loadProgress.subTotal.store( 0, std::memory_order_relaxed );
    s_loadProgress.progress.store( LoadProgress::Messages, std::memory_order_relaxed );
    unordered_flat_map<uint64_t, MessageData*> msgMap;
    f.Read( sz );
    if( eventMask & EventType::Messages )
    {
        m_data.messages.reserve_exact( sz, m_slab );
        int64_t refTime = 0;
        for( uint64_t i=0; i<sz; i++ )
        {
            uint64_t ptr;
            f.Read( ptr );
            auto msgdata = m_slab.Alloc<MessageData>();
            msgdata->time = ReadTimeOffset( f, refTime );
            f.Read3( msgdata->ref, msgdata->color, msgdata->callstack );
            m_data.messages[i] = msgdata;
            msgMap.emplace( ptr, msgdata );
        }
    }
    else
    {
        f.Skip( sz * ( sizeof( uint64_t ) + sizeof( MessageData::time ) + sizeof( MessageData::ref ) + sizeof( MessageData::color ) + sizeof( MessageData::callstack ) ) );
    }

    if( fileVer >= FileVersion( 0, 7, 5 ) )
    {
        f.Read( sz );
        assert( sz != 0 );
        m_data.zoneExtra.reserve_exact( sz, m_slab );
        f.Read( m_data.zoneExtra.data(), sz * sizeof( ZoneExtra ) );
    }
    else
    {
        f.Read( sz );
        assert( sz != 0 );
        m_data.zoneExtra.reserve_exact( sz, m_slab );
        for( uint64_t i=0; i<sz; i++ )
        {
            auto* zoneExtra = &m_data.zoneExtra[i];
            f.Read3( zoneExtra->callstack, zoneExtra->text, zoneExtra->name );
            zoneExtra->color = 0;
        }
    }

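    // Threads: per-thread zone timelines, message references and sampling data.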
    s_loadProgress.progress.store( LoadProgress::Zones, std::memory_order_relaxed );
    f.Read( sz );
    s_loadProgress.subTotal.store( sz, std::memory_order_relaxed );
    s_loadProgress.subProgress.store( 0, std::memory_order_relaxed );
    f.Read( sz );
    m_data.zoneChildren.reserve_exact( sz, m_slab );
    memset( (char*)m_data.zoneChildren.data(), 0, sizeof( Vector<short_ptr<ZoneEvent>> ) * sz );
    int32_t childIdx = 0;
    f.Read( sz );
    m_data.threads.reserve_exact( sz, m_slab );
    for( uint64_t i=0; i<sz; i++ )
    {
        auto td = m_slab.AllocInit<ThreadData>();
        uint64_t tid;
        if( fileVer >= FileVersion( 0, 7, 11 ) )
        {
            f.Read4( tid, td->count, td->kernelSampleCnt, td->isFiber );
        }
        else if( fileVer >= FileVersion( 0, 7, 9 ) )
        {
            f.Read3( tid, td->count, td->kernelSampleCnt );
            td->isFiber = 0;
        }
        else
        {
            f.Read2( tid, td->count );
            td->kernelSampleCnt = 0;
            td->isFiber = 0;
        }
        td->id = tid;
        m_data.zonesCnt += td->count;
        uint32_t tsz;
        f.Read( tsz );
        if( tsz != 0 )
        {
            ReadTimeline( f, td->timeline, tsz, 0, childIdx );
        }
        uint64_t msz;
        f.Read( msz );
        if( eventMask & EventType::Messages )
        {
            const auto ctid = CompressThread( tid );
            td->messages.reserve_exact( msz, m_slab );
            for( uint64_t j=0; j<msz; j++ )
            {
                uint64_t ptr;
                f.Read( ptr );
                auto md = msgMap[ptr];
                td->messages[j] = md;
                md->thread = ctid;
            }
        }
        else
        {
            f.Skip( msz * sizeof( uint64_t ) );
        }
        if( fileVer >= FileVersion( 0, 7, 14 ) )
        {
            uint64_t ssz;
            f.Read( ssz );
            if( ssz != 0 )
            {
                if( eventMask & EventType::Samples )
                {
                    int64_t refTime = 0;
                    td->ctxSwitchSamples.reserve_exact( ssz, m_slab );
                    auto ptr = td->ctxSwitchSamples.data();
                    for( uint64_t j=0; j<ssz; j++ )
                    {
                        ptr->time.SetVal( ReadTimeOffset( f, refTime ) );
                        f.Read( &ptr->callstack, sizeof( ptr->callstack ) );
                        ptr++;
                    }
                }
                else
                {
                    f.Skip( ssz * ( 8 + 3 ) );
                }
            }
        }
        uint64_t ssz;
        f.Read( ssz );
        if( ssz != 0 )
        {
            if( eventMask & EventType::Samples )
            {
                m_data.samplesCnt += ssz;
                int64_t refTime = 0;
                td->samples.reserve_exact( ssz, m_slab );
                auto ptr = td->samples.data();
                for( uint64_t j=0; j<ssz; j++ )
                {
                    ptr->time.SetVal( ReadTimeOffset( f, refTime ) );
                    f.Read( &ptr->callstack, sizeof( ptr->callstack ) );
                    ptr++;
                }
            }
            else
            {
                f.Skip( ssz * ( 8 + 3 ) );
            }
        }
        m_data.threads[i] = td;
        m_threadMap.emplace( tid, td );
    }

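    // GPU contexts and their per-thread zone timelines; older file versions carry fewer
    // context fields.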
    s_loadProgress.progress.store( LoadProgress::GpuZones, std::memory_order_relaxed );
    f.Read( sz );
    s_loadProgress.subTotal.store( sz, std::memory_order_relaxed );
    s_loadProgress.subProgress.store( 0, std::memory_order_relaxed );
    f.Read( sz );
    m_data.gpuChildren.reserve_exact( sz, m_slab );
    memset( (char*)m_data.gpuChildren.data(), 0, sizeof( Vector<short_ptr<GpuEvent>> ) * sz );
    childIdx = 0;
    f.Read( sz );
    m_data.gpuData.reserve_exact( sz, m_slab );
    for( uint64_t i=0; i<sz; i++ )
    {
        auto ctx = m_slab.AllocInit<GpuCtxData>();
        if( fileVer >= FileVersion( 0, 7, 9 ) )
        {
            uint8_t calibration;
            f.Read7( ctx->thread, calibration, ctx->count, ctx->period, ctx->type, ctx->name, ctx->overflow );
            ctx->hasCalibration = calibration;
        }
        else if( fileVer >= FileVersion( 0, 7, 6 ) )
        {
            uint8_t calibration;
            f.Read6( ctx->thread, calibration, ctx->count, ctx->period, ctx->type, ctx->name );
            ctx->hasCalibration = calibration;
            ctx->overflow = 0;
        }
        else if( fileVer >= FileVersion( 0, 7, 1 ) )
        {
            uint8_t calibration;
            f.Read5( ctx->thread, calibration, ctx->count, ctx->period, ctx->type );
            ctx->hasCalibration = calibration;
            ctx->overflow = 0;
        }
        else
        {
            uint8_t accuracy;
            f.Read5( ctx->thread, accuracy, ctx->count, ctx->period, ctx->type );
            ctx->hasCalibration = false;
            ctx->overflow = 0;
        }
        ctx->hasPeriod = ctx->period != 1.f;
        m_data.gpuCnt += ctx->count;
        uint64_t tdsz;
        f.Read( tdsz );
        for( uint64_t j=0; j<tdsz; j++ )
        {
            uint64_t tid, tsz;
            f.Read2( tid, tsz );
            if( tsz != 0 )
            {
                int64_t refTime = 0;
                int64_t refGpuTime = 0;
                auto td = ctx->threadData.emplace( tid, GpuCtxThreadData {} ).first;
                ReadTimeline( f, td->second.timeline, tsz, refTime, refGpuTime, childIdx );
            }
        }
        m_data.gpuData[i] = ctx;
    }

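    // Plots: values use delta-encoded timestamps; files before 0.7.10 do not store the
    // precomputed sum, so it is accumulated while reading.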
    s_loadProgress.progress.store( LoadProgress::Plots, std::memory_order_relaxed );
    f.Read( sz );
    if( eventMask & EventType::Plots )
    {
        m_data.plots.Data().reserve( sz );
        s_loadProgress.subTotal.store( sz, std::memory_order_relaxed );
        if( fileVer >= FileVersion( 0, 7, 10 ) )
        {
            for( uint64_t i=0; i<sz; i++ )
            {
                s_loadProgress.subProgress.store( i, std::memory_order_relaxed );
                auto pd = m_slab.AllocInit<PlotData>();
                uint64_t psz;
                f.Read7( pd->type, pd->format, pd->name, pd->min, pd->max, pd->sum, psz );
                pd->data.reserve_exact( psz, m_slab );
                auto ptr = pd->data.data();
                int64_t refTime = 0;
                for( uint64_t j=0; j<psz; j++ )
                {
                    int64_t t;
                    f.Read2( t, ptr->val );
                    refTime += t;
                    ptr->time = refTime;
                    ptr++;
                }
                m_data.plots.Data().push_back_no_space_check( pd );
            }
        }
        else
        {
            for( uint64_t i=0; i<sz; i++ )
            {
                s_loadProgress.subProgress.store( i, std::memory_order_relaxed );
                auto pd = m_slab.AllocInit<PlotData>();
                uint64_t psz;
                f.Read6( pd->type, pd->format, pd->name, pd->min, pd->max, psz );
                pd->sum = 0;
                pd->data.reserve_exact( psz, m_slab );
                auto ptr = pd->data.data();
                int64_t refTime = 0;
                for( uint64_t j=0; j<psz; j++ )
                {
                    int64_t t;
                    f.Read2( t, ptr->val );
                    pd->sum += ptr->val;
                    refTime += t;
                    ptr->time = refTime;
                    ptr++;
                }
                m_data.plots.Data().push_back_no_space_check( pd );
            }
        }
    }
    else
    {
        if( fileVer >= FileVersion( 0, 7, 10 ) )
        {
            for( uint64_t i=0; i<sz; i++ )
            {
                f.Skip( sizeof( PlotData::name ) + sizeof( PlotData::min ) + sizeof( PlotData::max ) + sizeof( PlotData::sum ) + sizeof( PlotData::type ) + sizeof( PlotData::format ) );
                uint64_t psz;
                f.Read( psz );
                f.Skip( psz * ( sizeof( uint64_t ) + sizeof( double ) ) );
            }
        }
        else
        {
            for( uint64_t i=0; i<sz; i++ )
            {
                f.Skip( sizeof( PlotData::name ) + sizeof( PlotData::min ) + sizeof( PlotData::max ) + sizeof( PlotData::type ) + sizeof( PlotData::format ) );
                uint64_t psz;
                f.Read( psz );
                f.Skip( psz * ( sizeof( uint64_t ) + sizeof( double ) ) );
            }
        }
    }

    s_loadProgress.subTotal.store( 0, std::memory_order_relaxed );
    s_loadProgress.progress.store( LoadProgress::Memory, std::memory_order_relaxed );

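    // Memory events: traces from 0.7.3 onwards may contain multiple named memory pools,
    // older ones a single default pool.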
    if( fileVer >= FileVersion( 0, 7, 3 ) )
    {
        uint64_t memcount, memtarget, memload = 0;
        f.Read2( memcount, memtarget );
        s_loadProgress.subTotal.store( memtarget, std::memory_order_relaxed );

        for( uint64_t k=0; k<memcount; k++ )
        {
            uint64_t memname;
            f.Read2( memname, sz );
            if( eventMask & EventType::Memory )
            {
                auto mit = m_data.memNameMap.emplace( memname, m_slab.AllocInit<MemData>() );
                if( memname == 0 ) m_data.memory = mit.first->second;
                auto& memdata = *mit.first->second;
                memdata.data.reserve_exact( sz, m_slab );
                uint64_t activeSz, freesSz;
                f.Read2( activeSz, freesSz );
                memdata.active.reserve( activeSz );
                memdata.frees.reserve_exact( freesSz, m_slab );
                auto mem = memdata.data.data();
                s_loadProgress.subTotal.store( sz, std::memory_order_relaxed );
                size_t fidx = 0;
                int64_t refTime = 0;
                auto& frees = memdata.frees;
                auto& active = memdata.active;

                for( uint64_t i=0; i<sz; i++ )
                {
                    s_loadProgress.subProgress.store( memload+i, std::memory_order_relaxed );
                    uint64_t ptr, size;
                    Int24 csAlloc;
                    int64_t timeAlloc, timeFree;
                    uint16_t threadAlloc, threadFree;
                    f.Read8( ptr, size, csAlloc, mem->csFree, timeAlloc, timeFree, threadAlloc, threadFree );
                    mem->SetPtr( ptr );
                    mem->SetSize( size );
                    mem->SetCsAlloc( csAlloc.Val() );
                    refTime += timeAlloc;
                    mem->SetTimeThreadAlloc( refTime, threadAlloc );
                    if( timeFree >= 0 )
                    {
                        mem->SetTimeThreadFree( timeFree + refTime, threadFree );
                        frees[fidx++] = i;
                    }
                    else
                    {
                        mem->SetTimeThreadFree( timeFree, threadFree );
                        active.emplace( ptr, i );
                    }
                    mem++;
                }
                memload += sz;
                f.Read4( memdata.high, memdata.low, memdata.usage, memdata.name );

                if( sz != 0 )
                {
                    memdata.reconstruct = true;
                }
            }
            else
            {
                f.Skip( 2 * sizeof( uint64_t ) );
                f.Skip( sz * ( sizeof( uint64_t ) + sizeof( uint64_t ) + sizeof( Int24 ) + sizeof( Int24 ) + sizeof( int64_t ) * 2 + sizeof( uint16_t ) * 2 ) );
                f.Skip( sizeof( MemData::high ) + sizeof( MemData::low ) + sizeof( MemData::usage ) + sizeof( MemData::name ) );
            }
        }
    }
    else
    {
        m_data.memory = m_slab.AllocInit<MemData>();
        m_data.memNameMap.emplace( 0, m_data.memory );

        f.Read( sz );
        if( eventMask & EventType::Memory )
        {
            auto& memdata = *m_data.memory;
            memdata.data.reserve_exact( sz, m_slab );
            uint64_t activeSz, freesSz;
            f.Read2( activeSz, freesSz );
            memdata.active.reserve( activeSz );
            memdata.frees.reserve_exact( freesSz, m_slab );
            auto mem = memdata.data.data();
            s_loadProgress.subTotal.store( sz, std::memory_order_relaxed );
            size_t fidx = 0;
            int64_t refTime = 0;
            auto& frees = memdata.frees;
            auto& active = memdata.active;

            for( uint64_t i=0; i<sz; i++ )
            {
                s_loadProgress.subProgress.store( i, std::memory_order_relaxed );
                uint64_t ptr, size;
                Int24 csAlloc;
                int64_t timeAlloc, timeFree;
                uint16_t threadAlloc, threadFree;
                f.Read8( ptr, size, csAlloc, mem->csFree, timeAlloc, timeFree, threadAlloc, threadFree );
                mem->SetPtr( ptr );
                mem->SetSize( size );
                mem->SetCsAlloc( csAlloc.Val() );
                refTime += timeAlloc;
                mem->SetTimeThreadAlloc( refTime, threadAlloc );
                if( timeFree >= 0 )
                {
                    mem->SetTimeThreadFree( timeFree + refTime, threadFree );
                    frees[fidx++] = i;
                }
                else
                {
                    mem->SetTimeThreadFree( timeFree, threadFree );
                    active.emplace( ptr, i );
                }
                mem++;
            }
            f.Read3( memdata.high, memdata.low, memdata.usage );

            if( sz != 0 )
            {
                memdata.reconstruct = true;
            }
        }
        else
        {
            f.Skip( 2 * sizeof( uint64_t ) );
            f.Skip( sz * ( sizeof( uint64_t ) + sizeof( uint64_t ) + sizeof( Int24 ) + sizeof( Int24 ) + sizeof( int64_t ) * 2 + sizeof( uint16_t ) * 2 ) );
            f.Skip( sizeof( MemData::high ) + sizeof( MemData::low ) + sizeof( MemData::usage ) );
        }
    }

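    // Call stacks: variable-length arrays of frame ids, followed by the frame data they
    // reference.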
s_loadProgress.subTotal.store( 0, std::memory_order_relaxed );
|
2018-07-29 14:56:46 +00:00
|
|
|
s_loadProgress.progress.store( LoadProgress::CallStacks, std::memory_order_relaxed );
|
2018-06-19 20:04:26 +00:00
|
|
|
f.Read( sz );
|
|
|
|
m_data.callstackPayload.reserve( sz );
|
2022-04-18 11:59:48 +00:00
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
2020-03-28 17:04:33 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
uint16_t csz;
|
|
|
|
f.Read( csz );
|
2020-03-28 17:04:33 +00:00
|
|
|
|
2022-04-18 11:59:48 +00:00
|
|
|
const auto memsize = sizeof( VarArray<CallstackFrameId> ) + csz * sizeof( CallstackFrameId );
|
|
|
|
auto mem = (char*)m_slab.AllocRaw( memsize );
|
2020-03-28 17:04:33 +00:00
|
|
|
|
2022-04-18 11:59:48 +00:00
|
|
|
auto data = (CallstackFrameId*)mem;
|
|
|
|
f.Read( data, csz * sizeof( CallstackFrameId ) );
|
2020-03-28 17:04:33 +00:00
|
|
|
|
2022-04-18 11:59:48 +00:00
|
|
|
auto arr = (VarArray<CallstackFrameId>*)( mem + csz * sizeof( CallstackFrameId ) );
|
|
|
|
new(arr) VarArray<CallstackFrameId>( csz, data );
|
2020-03-28 17:04:33 +00:00
|
|
|
|
2022-04-18 11:59:48 +00:00
|
|
|
m_data.callstackPayload.push_back_no_space_check( arr );
|
2018-06-19 20:04:26 +00:00
|
|
|
}
|
2018-06-19 23:59:25 +00:00
|
|
|
|
2022-04-18 11:59:48 +00:00
|
|
|
f.Read( sz );
|
|
|
|
m_data.callstackFrameMap.reserve( sz );
|
|
|
|
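// Rebuild the callstack frame map: each frame id resolves to a CallstackFrameData followed by a packed array of frameData->size CallstackFrame entries.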
for( uint64_t i=0; i<sz; i++ )
|
2020-02-25 22:42:59 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
CallstackFrameId id;
|
|
|
|
auto frameData = m_slab.Alloc<CallstackFrameData>();
|
|
|
|
f.Read3( id, frameData->size, frameData->imageName );
|
2020-02-25 22:42:59 +00:00
|
|
|
|
2022-04-18 11:59:48 +00:00
|
|
|
frameData->data = m_slab.Alloc<CallstackFrame>( frameData->size );
|
|
|
|
f.Read( frameData->data, sizeof( CallstackFrame ) * frameData->size );
|
2020-02-25 22:42:59 +00:00
|
|
|
|
2022-04-18 11:59:48 +00:00
|
|
|
m_data.callstackFrameMap.emplace( id, frameData );
|
2020-02-25 22:42:59 +00:00
|
|
|
}
|
2018-06-19 23:59:25 +00:00
|
|
|
|
2019-11-21 20:48:35 +00:00
|
|
|
f.Read( sz );
|
|
|
|
if( sz > 0 )
|
2019-01-20 18:11:48 +00:00
|
|
|
{
|
2019-11-21 20:48:35 +00:00
|
|
|
m_data.appInfo.reserve_exact( sz, m_slab );
|
|
|
|
f.Read( m_data.appInfo.data(), sizeof( m_data.appInfo[0] ) * sz );
|
2018-06-19 23:59:25 +00:00
|
|
|
}
|
2018-06-24 15:10:46 +00:00
|
|
|
|
2019-11-21 20:48:35 +00:00
|
|
|
s_loadProgress.subTotal.store( 0, std::memory_order_relaxed );
|
|
|
|
s_loadProgress.progress.store( LoadProgress::FrameImages, std::memory_order_relaxed );
|
|
|
|
|
|
|
|
if( eventMask & EventType::FrameImages )
|
2019-07-12 16:45:35 +00:00
|
|
|
{
|
2021-05-15 15:02:25 +00:00
|
|
|
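// Traces in format 0.7.8 and later embed a zstd dictionary for frame image data; build both a compression and a decompression dictionary from it.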
ZSTD_CDict* cdict = nullptr;
|
|
|
|
if( fileVer >= FileVersion( 0, 7, 8 ) )
|
|
|
|
{
|
|
|
|
uint32_t dsz;
|
|
|
|
f.Read( dsz );
|
|
|
|
auto dict = new char[dsz];
|
|
|
|
f.Read( dict, dsz );
|
|
|
|
cdict = ZSTD_createCDict( dict, dsz, 3 );
|
|
|
|
m_texcomp.SetDict( ZSTD_createDDict( dict, dsz ) );
|
|
|
|
delete[] dict;
|
|
|
|
}
|
|
|
|
|
2019-07-12 16:45:35 +00:00
|
|
|
f.Read( sz );
|
2019-11-21 20:48:35 +00:00
|
|
|
m_data.frameImage.reserve_exact( sz, m_slab );
|
|
|
|
s_loadProgress.subTotal.store( sz, std::memory_order_relaxed );
|
|
|
|
if( sz != 0 )
|
2019-07-12 16:45:35 +00:00
|
|
|
{
|
2019-11-21 20:48:35 +00:00
|
|
|
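// Frame images are recompressed in parallel. Each JobData slot owns an input buffer, an output buffer and a zstd context; the atomic state field hands the slot between this dispatcher thread and a worker.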
struct JobData
|
|
|
|
{
|
|
|
|
enum State : int { InProgress, Available, DataReady };
|
|
|
|
FrameImage* fi;
|
|
|
|
char* buf = nullptr;
|
|
|
|
size_t bufsz = 0;
|
|
|
|
char* outbuf = nullptr;
|
|
|
|
size_t outsz = 0;
|
2020-02-09 20:22:12 +00:00
|
|
|
ZSTD_CCtx* ctx = ZSTD_createCCtx();
|
2019-12-31 13:46:01 +00:00
|
|
|
alignas(64) std::atomic<State> state = Available;
|
2019-11-21 20:48:35 +00:00
|
|
|
};
|
2019-07-12 16:45:35 +00:00
|
|
|
|
2019-11-21 20:48:35 +00:00
|
|
|
// Leave one thread for the file reader and a second for dispatch (this thread)
|
|
|
|
// Use at least 2 threads so there are at least two buffers (one in use, one filling up)
|
|
|
|
const auto jobs = std::max<int>( std::thread::hardware_concurrency() - 2, 2 );
|
|
|
|
auto td = std::make_unique<TaskDispatch>( jobs );
|
|
|
|
auto data = std::make_unique<JobData[]>( jobs );
|
2019-06-06 21:40:37 +00:00
|
|
|
|
2019-11-21 20:48:35 +00:00
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
2019-06-06 21:08:19 +00:00
|
|
|
{
|
2019-11-21 20:48:35 +00:00
|
|
|
s_loadProgress.subProgress.store( i, std::memory_order_relaxed );
|
|
|
|
auto fi = m_slab.Alloc<FrameImage>();
|
|
|
|
f.Read3( fi->w, fi->h, fi->flip );
|
|
|
|
const auto sz = size_t( fi->w * fi->h / 2 );
|
2019-09-20 21:03:12 +00:00
|
|
|
|
2019-11-21 20:48:35 +00:00
|
|
|
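// Spin until a job slot is free, harvesting the output of any finished job before its slot is reused.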
int idx = -1;
|
|
|
|
for(;;)
|
|
|
|
{
|
|
|
|
for( int j=0; j<jobs; j++ )
|
2019-09-20 21:03:12 +00:00
|
|
|
{
|
2019-11-21 20:48:35 +00:00
|
|
|
const auto state = data[j].state.load( std::memory_order_acquire );
|
|
|
|
if( state != JobData::InProgress )
|
2019-09-20 21:03:12 +00:00
|
|
|
{
|
2019-11-21 20:48:35 +00:00
|
|
|
if( state == JobData::DataReady )
|
2019-09-20 21:03:12 +00:00
|
|
|
{
|
2019-11-21 20:48:35 +00:00
|
|
|
char* tmp = (char*)m_slab.AllocBig( data[j].fi->csz );
|
|
|
|
memcpy( tmp, data[j].outbuf, data[j].fi->csz );
|
|
|
|
data[j].fi->ptr = tmp;
|
2019-09-20 21:03:12 +00:00
|
|
|
}
|
2019-11-21 20:48:35 +00:00
|
|
|
idx = j;
|
|
|
|
break;
|
2019-09-20 21:03:12 +00:00
|
|
|
}
|
|
|
|
}
|
2019-11-21 20:48:35 +00:00
|
|
|
if( idx >= 0 ) break;
|
2019-12-31 13:59:54 +00:00
|
|
|
YieldThread();
|
2019-11-21 20:48:35 +00:00
|
|
|
}
|
2019-09-20 21:03:12 +00:00
|
|
|
|
2019-11-21 20:48:35 +00:00
|
|
|
if( data[idx].bufsz < sz )
|
|
|
|
{
|
|
|
|
data[idx].bufsz = sz;
|
|
|
|
delete[] data[idx].buf;
|
|
|
|
data[idx].buf = new char[sz];
|
|
|
|
}
|
|
|
|
f.Read( data[idx].buf, sz );
|
|
|
|
data[idx].fi = fi;
|
2019-09-20 21:03:12 +00:00
|
|
|
|
2019-11-21 20:48:35 +00:00
|
|
|
data[idx].state.store( JobData::InProgress, std::memory_order_release );
|
2022-04-26 19:29:16 +00:00
|
|
|
td->Queue( [this, &data, idx, fi, cdict] {
|
2021-05-15 15:02:25 +00:00
|
|
|
if( cdict )
|
|
|
|
{
|
|
|
|
fi->csz = m_texcomp.Pack( data[idx].ctx, cdict, data[idx].outbuf, data[idx].outsz, data[idx].buf, fi->w * fi->h / 2 );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
fi->csz = m_texcomp.Pack( data[idx].ctx, data[idx].outbuf, data[idx].outsz, data[idx].buf, fi->w * fi->h / 2 );
|
|
|
|
}
|
2019-11-21 20:48:35 +00:00
|
|
|
data[idx].state.store( JobData::DataReady, std::memory_order_release );
|
|
|
|
} );
|
2019-09-20 21:03:12 +00:00
|
|
|
|
2019-11-21 20:48:35 +00:00
|
|
|
m_data.frameImage[i] = fi;
|
|
|
|
}
|
|
|
|
td->Sync();
|
|
|
|
td.reset();
|
2020-03-01 00:48:20 +00:00
|
|
|
for( int i=0; i<jobs; i++ )
|
2019-11-21 20:48:35 +00:00
|
|
|
{
|
|
|
|
if( data[i].state.load( std::memory_order_acquire ) == JobData::DataReady )
|
2019-06-08 10:17:18 +00:00
|
|
|
{
|
2019-11-21 20:48:35 +00:00
|
|
|
char* tmp = (char*)m_slab.AllocBig( data[i].fi->csz );
|
|
|
|
memcpy( tmp, data[i].outbuf, data[i].fi->csz );
|
|
|
|
data[i].fi->ptr = tmp;
|
2019-06-08 10:17:18 +00:00
|
|
|
}
|
2020-02-09 20:22:12 +00:00
|
|
|
ZSTD_freeCCtx( data[i].ctx );
|
2019-11-21 20:48:35 +00:00
|
|
|
delete[] data[i].buf;
|
|
|
|
delete[] data[i].outbuf;
|
|
|
|
}
|
2019-06-11 22:55:02 +00:00
|
|
|
|
2019-11-21 20:48:35 +00:00
|
|
|
const auto& frames = GetFramesBase()->frames;
|
|
|
|
const auto fsz = uint32_t( frames.size() );
|
|
|
|
for( uint32_t i=0; i<fsz; i++ )
|
|
|
|
{
|
|
|
|
const auto& f = frames[i];
|
|
|
|
if( f.frameImage != -1 )
|
2019-06-11 22:55:02 +00:00
|
|
|
{
|
2019-11-21 20:48:35 +00:00
|
|
|
m_data.frameImage[f.frameImage]->frameRef = i;
|
2019-06-11 22:55:02 +00:00
|
|
|
}
|
|
|
|
}
|
2019-06-06 21:08:19 +00:00
|
|
|
}
|
2021-05-15 15:02:25 +00:00
|
|
|
|
|
|
|
ZSTD_freeCDict( cdict );
|
2019-11-21 20:48:35 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2021-05-15 15:02:25 +00:00
|
|
|
if( fileVer >= FileVersion( 0, 7, 8 ) )
|
|
|
|
{
|
|
|
|
uint32_t dsz;
|
|
|
|
f.Read( dsz );
|
|
|
|
f.Skip( dsz );
|
|
|
|
}
|
2019-11-21 20:48:35 +00:00
|
|
|
f.Read( sz );
|
|
|
|
s_loadProgress.subTotal.store( sz, std::memory_order_relaxed );
|
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
2019-06-06 21:08:19 +00:00
|
|
|
{
|
2019-11-21 20:48:35 +00:00
|
|
|
s_loadProgress.subProgress.store( i, std::memory_order_relaxed );
|
|
|
|
uint16_t w, h;
|
|
|
|
f.Read2( w, h );
|
|
|
|
const auto fisz = w * h / 2;
|
|
|
|
f.Skip( fisz + sizeof( FrameImage::flip ) );
|
2019-06-06 21:08:19 +00:00
|
|
|
}
|
2020-07-17 20:16:47 +00:00
|
|
|
for( auto& v : m_data.framesBase->frames )
|
|
|
|
{
|
|
|
|
v.frameImage = -1;
|
|
|
|
}
|
2019-06-06 21:08:19 +00:00
|
|
|
}
|
|
|
|
|
2020-06-14 13:52:52 +00:00
|
|
|
s_loadProgress.subTotal.store( 0, std::memory_order_relaxed );
|
|
|
|
s_loadProgress.progress.store( LoadProgress::ContextSwitches, std::memory_order_relaxed );
|
2019-08-12 22:56:57 +00:00
|
|
|
|
2020-06-14 13:52:52 +00:00
|
|
|
if( eventMask & EventType::ContextSwitches )
|
|
|
|
{
|
|
|
|
f.Read( sz );
|
|
|
|
s_loadProgress.subTotal.store( sz, std::memory_order_relaxed );
|
|
|
|
m_data.ctxSwitch.reserve( sz );
|
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
2019-08-12 22:56:57 +00:00
|
|
|
{
|
2020-06-14 13:52:52 +00:00
|
|
|
s_loadProgress.subProgress.store( i, std::memory_order_relaxed );
|
|
|
|
uint64_t thread, csz;
|
|
|
|
f.Read2( thread, csz );
|
|
|
|
auto data = m_slab.AllocInit<ContextSwitch>();
|
|
|
|
data->v.reserve_exact( csz, m_slab );
|
|
|
|
int64_t runningTime = 0;
|
|
|
|
int64_t refTime = 0;
|
|
|
|
auto ptr = data->v.data();
|
2021-11-06 19:22:38 +00:00
|
|
|
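// Context switch records are delta encoded against a running refTime; traces from 0.7.12 onwards additionally store the thread that was switched in.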
if( fileVer >= FileVersion( 0, 7, 12 ) )
|
2019-08-12 22:56:57 +00:00
|
|
|
{
|
2021-11-06 19:22:38 +00:00
|
|
|
for( uint64_t j=0; j<csz; j++ )
|
|
|
|
{
|
|
|
|
int64_t deltaWakeup, deltaStart, diff, thread;
|
|
|
|
uint8_t cpu;
|
|
|
|
int8_t reason, state;
|
|
|
|
f.Read7( deltaWakeup, deltaStart, diff, cpu, reason, state, thread );
|
|
|
|
refTime += deltaWakeup;
|
|
|
|
ptr->SetWakeup( refTime );
|
|
|
|
refTime += deltaStart;
|
|
|
|
ptr->SetStartCpu( refTime, cpu );
|
|
|
|
if( diff > 0 ) runningTime += diff;
|
|
|
|
refTime += diff;
|
|
|
|
ptr->SetEndReasonState( refTime, reason, state );
|
|
|
|
ptr->SetThread( CompressThread( thread ) );
|
|
|
|
ptr++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
for( uint64_t j=0; j<csz; j++ )
|
|
|
|
{
|
|
|
|
int64_t deltaWakeup, deltaStart, diff;
|
|
|
|
uint8_t cpu;
|
|
|
|
int8_t reason, state;
|
|
|
|
f.Read6( deltaWakeup, deltaStart, diff, cpu, reason, state );
|
|
|
|
refTime += deltaWakeup;
|
|
|
|
ptr->SetWakeup( refTime );
|
|
|
|
refTime += deltaStart;
|
|
|
|
ptr->SetStartCpu( refTime, cpu );
|
|
|
|
if( diff > 0 ) runningTime += diff;
|
|
|
|
refTime += diff;
|
|
|
|
ptr->SetEndReasonState( refTime, reason, state );
|
|
|
|
ptr->SetThread( 0 );
|
|
|
|
ptr++;
|
|
|
|
}
|
2019-08-12 22:56:57 +00:00
|
|
|
}
|
2020-06-14 13:52:52 +00:00
|
|
|
data->runningTime = runningTime;
|
|
|
|
m_data.ctxSwitch.emplace( thread, data );
|
2019-08-12 22:56:57 +00:00
|
|
|
}
|
2020-06-14 13:52:52 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
f.Read( sz );
|
|
|
|
s_loadProgress.subTotal.store( sz, std::memory_order_relaxed );
|
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
2019-08-12 22:56:57 +00:00
|
|
|
{
|
2020-06-14 13:52:52 +00:00
|
|
|
s_loadProgress.subProgress.store( i, std::memory_order_relaxed );
|
|
|
|
f.Skip( sizeof( uint64_t ) );
|
|
|
|
uint64_t csz;
|
|
|
|
f.Read( csz );
|
2021-11-06 19:22:38 +00:00
|
|
|
if( fileVer >= FileVersion( 0, 7, 12 ) )
|
|
|
|
{
|
|
|
|
f.Skip( csz * ( sizeof( int64_t ) * 4 + sizeof( int8_t ) * 3 ) );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
f.Skip( csz * ( sizeof( int64_t ) * 3 + sizeof( int8_t ) * 3 ) );
|
|
|
|
}
|
2019-08-12 22:56:57 +00:00
|
|
|
}
|
2020-06-14 13:52:52 +00:00
|
|
|
}
|
2019-08-12 22:56:57 +00:00
|
|
|
|
2020-06-14 13:52:52 +00:00
|
|
|
s_loadProgress.subTotal.store( 0, std::memory_order_relaxed );
|
|
|
|
s_loadProgress.progress.store( LoadProgress::ContextSwitchesPerCpu, std::memory_order_relaxed );
|
|
|
|
f.Read( sz );
|
|
|
|
s_loadProgress.subTotal.store( sz, std::memory_order_relaxed );
|
|
|
|
if( eventMask & EventType::ContextSwitches )
|
|
|
|
{
|
|
|
|
uint64_t cnt = 0;
|
|
|
|
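// Per-CPU context switch timelines for up to 256 cores; each entry is a delta-encoded start/end pair plus a compressed thread index.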
for( int i=0; i<256; i++ )
|
2019-08-16 14:51:02 +00:00
|
|
|
{
|
2020-06-14 13:52:52 +00:00
|
|
|
int64_t refTime = 0;
|
|
|
|
f.Read( sz );
|
|
|
|
if( sz != 0 )
|
2019-08-16 14:51:02 +00:00
|
|
|
{
|
2020-06-14 13:52:52 +00:00
|
|
|
m_data.cpuDataCount = i+1;
|
|
|
|
m_data.cpuData[i].cs.reserve_exact( sz, m_slab );
|
|
|
|
auto ptr = m_data.cpuData[i].cs.data();
|
|
|
|
for( uint64_t j=0; j<sz; j++ )
|
2019-08-16 14:51:02 +00:00
|
|
|
{
|
2020-06-14 13:52:52 +00:00
|
|
|
int64_t deltaStart, deltaEnd;
|
|
|
|
uint16_t thread;
|
|
|
|
f.Read3( deltaStart, deltaEnd, thread );
|
|
|
|
refTime += deltaStart;
|
|
|
|
ptr->SetStartThread( refTime, thread );
|
|
|
|
refTime += deltaEnd;
|
|
|
|
ptr->SetEnd( refTime );
|
|
|
|
ptr++;
|
2019-08-16 14:51:02 +00:00
|
|
|
}
|
2020-06-14 13:52:52 +00:00
|
|
|
cnt += sz;
|
2019-08-16 14:51:02 +00:00
|
|
|
}
|
2020-06-14 13:52:52 +00:00
|
|
|
s_loadProgress.subProgress.store( cnt, std::memory_order_relaxed );
|
2019-08-16 14:51:02 +00:00
|
|
|
}
|
2020-06-14 13:52:52 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
for( int i=0; i<256; i++ )
|
2019-08-16 14:51:02 +00:00
|
|
|
{
|
2020-06-14 13:52:52 +00:00
|
|
|
f.Read( sz );
|
|
|
|
f.Skip( sz * ( sizeof( int64_t ) * 2 + sizeof( uint16_t ) ) );
|
2019-08-16 14:51:02 +00:00
|
|
|
}
|
2020-06-14 13:52:52 +00:00
|
|
|
}
|
2019-08-16 14:51:02 +00:00
|
|
|
|
2020-06-14 13:52:52 +00:00
|
|
|
f.Read( sz );
|
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
|
|
|
{
|
|
|
|
uint64_t tid, pid;
|
|
|
|
f.Read2( tid, pid );
|
|
|
|
m_data.tidToPid.emplace( tid, pid );
|
|
|
|
}
|
2019-08-17 23:53:38 +00:00
|
|
|
|
2020-06-14 13:52:52 +00:00
|
|
|
f.Read( sz );
|
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
|
|
|
{
|
|
|
|
uint64_t tid;
|
|
|
|
CpuThreadData data;
|
|
|
|
f.Read2( tid, data );
|
|
|
|
m_data.cpuThreadData.emplace( tid, data );
|
2019-08-17 20:36:21 +00:00
|
|
|
}
|
|
|
|
|
2022-04-18 11:59:48 +00:00
|
|
|
f.Read( sz );
|
|
|
|
m_data.symbolLoc.reserve_exact( sz, m_slab );
|
|
|
|
f.Read( sz );
|
|
|
|
if( fileVer < FileVersion( 0, 7, 2 ) )
|
|
|
|
{
|
|
|
|
m_data.symbolLocInline.reserve_exact( sz + 1, m_slab );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
m_data.symbolLocInline.reserve_exact( sz, m_slab );
|
|
|
|
}
|
|
|
|
f.Read( sz );
|
|
|
|
m_data.symbolMap.reserve( sz );
|
|
|
|
int symIdx = 0;
|
|
|
|
int symInlineIdx = 0;
|
|
|
|
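// Populate the symbol map, splitting symbols into the inline and non-inline location lists as they are read.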
for( uint64_t i=0; i<sz; i++ )
|
2020-03-25 17:37:08 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
uint64_t symAddr;
|
|
|
|
StringIdx name, file, imageName, callFile;
|
|
|
|
uint32_t line, callLine;
|
|
|
|
uint8_t isInline;
|
|
|
|
Int24 size;
|
|
|
|
f.Read9( symAddr, name, file, line, imageName, callFile, callLine, isInline, size );
|
2022-06-16 13:29:39 +00:00
|
|
|
m_data.symbolMap.emplace( symAddr, SymbolData { { name, file, line }, imageName, callFile, callLine, isInline, size } );
|
2022-04-18 11:59:48 +00:00
|
|
|
if( isInline )
|
2020-03-25 17:37:08 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
m_data.symbolLocInline[symInlineIdx++] = symAddr;
|
2020-04-08 10:49:58 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
m_data.symbolLoc[symIdx++] = SymbolLocation { symAddr, size.Val() };
|
2020-03-25 17:37:08 +00:00
|
|
|
}
|
|
|
|
}
|
2022-04-18 11:59:48 +00:00
|
|
|
if( fileVer < FileVersion( 0, 7, 2 ) )
|
2020-02-26 21:53:18 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
m_data.symbolLocInline[symInlineIdx] = std::numeric_limits<uint64_t>::max();
|
|
|
|
}
|
|
|
|
|
|
|
|
f.Read( sz );
|
|
|
|
if( eventMask & EventType::SymbolCode )
|
|
|
|
{
|
|
|
|
uint64_t ssz = 0;
|
|
|
|
m_data.symbolCode.reserve( sz );
|
2020-02-26 21:53:18 +00:00
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
|
|
|
{
|
|
|
|
uint64_t symAddr;
|
2022-04-18 11:59:48 +00:00
|
|
|
uint32_t len;
|
|
|
|
f.Read2( symAddr, len );
|
|
|
|
ssz += len;
|
|
|
|
auto ptr = (char*)m_slab.AllocBig( len );
|
|
|
|
f.Read( ptr, len );
|
|
|
|
m_data.symbolCode.emplace( symAddr, MemoryBlock { ptr, len } );
|
2020-02-26 21:53:18 +00:00
|
|
|
}
|
2022-04-18 11:59:48 +00:00
|
|
|
m_data.symbolCodeSize = ssz;
|
2020-02-26 21:53:18 +00:00
|
|
|
}
|
2022-04-18 11:59:48 +00:00
|
|
|
else
|
2020-03-25 19:52:59 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
2020-03-25 19:52:59 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
uint64_t symAddr;
|
|
|
|
uint32_t len;
|
|
|
|
f.Read2( symAddr, len );
|
|
|
|
f.Skip( len );
|
2020-03-25 19:52:59 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-04-18 11:59:48 +00:00
|
|
|
f.Read( sz );
|
|
|
|
if( eventMask & EventType::SymbolCode )
|
2020-04-02 10:12:10 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
m_data.locationCodeAddressList.reserve( sz );
|
|
|
|
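// Each source location stores a delta-encoded list of code addresses; rebuild both the per-location list and the reverse address-to-location map.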
for( uint64_t i=0; i<sz; i++ )
|
2020-04-02 10:12:10 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
uint64_t packed;
|
|
|
|
uint16_t lsz;
|
|
|
|
f.Read2( packed, lsz );
|
|
|
|
Vector<uint64_t> data;
|
|
|
|
data.reserve_exact( lsz, m_slab );
|
|
|
|
uint64_t ref = 0;
|
|
|
|
for( uint16_t j=0; j<lsz; j++ )
|
2020-04-02 10:12:10 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
uint64_t diff;
|
|
|
|
f.Read( diff );
|
|
|
|
ref += diff;
|
|
|
|
data[j] = ref;
|
|
|
|
m_data.codeAddressToLocation.emplace( ref, packed );
|
2020-04-02 10:12:10 +00:00
|
|
|
}
|
2022-04-18 11:59:48 +00:00
|
|
|
m_data.locationCodeAddressList.emplace( packed, std::move( data ) );
|
2020-04-02 10:12:10 +00:00
|
|
|
}
|
2022-04-18 11:59:48 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
2020-04-02 10:12:10 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
uint64_t packed;
|
|
|
|
uint16_t lsz;
|
|
|
|
f.Read2( packed, lsz );
|
|
|
|
f.Skip( lsz * sizeof( uint64_t ) );
|
2020-04-02 10:12:10 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-05-20 17:37:51 +00:00
|
|
|
if( fileVer >= FileVersion( 0, 7, 9 ) )
|
|
|
|
{
|
2021-06-19 17:58:16 +00:00
|
|
|
f.Read( sz );
|
|
|
|
m_data.codeSymbolMap.reserve( sz );
|
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
|
|
|
{
|
|
|
|
uint64_t v1, v2;
|
|
|
|
f.Read2( v1, v2 );
|
|
|
|
m_data.codeSymbolMap.emplace( v1, v2 );
|
|
|
|
}
|
|
|
|
|
2021-05-20 17:37:51 +00:00
|
|
|
f.Read( sz );
|
|
|
|
m_data.hwSamples.reserve( sz );
|
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
|
|
|
{
|
|
|
|
uint64_t addr;
|
2021-06-04 11:38:45 +00:00
|
|
|
f.Read( addr );
|
|
|
|
auto& data = m_data.hwSamples.emplace( addr, HwSampleData {} ).first->second;
|
|
|
|
ReadHwSampleVec( f, data.cycles, m_slab );
|
|
|
|
ReadHwSampleVec( f, data.retired, m_slab );
|
|
|
|
ReadHwSampleVec( f, data.cacheRef, m_slab );
|
|
|
|
ReadHwSampleVec( f, data.cacheMiss, m_slab );
|
2022-04-01 16:46:46 +00:00
|
|
|
if( ReadHwSampleVec( f, data.branchRetired, m_slab ) != 0 ) m_data.hasBranchRetirement = true;
|
2021-06-04 11:38:45 +00:00
|
|
|
ReadHwSampleVec( f, data.branchMiss, m_slab );
|
2021-05-20 17:37:51 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-04-18 11:59:48 +00:00
|
|
|
f.Read( sz );
|
|
|
|
if( eventMask & EventType::SourceCache )
|
2020-05-23 13:43:42 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
m_data.sourceFileCache.reserve( sz );
|
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
2020-05-23 13:43:42 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
uint32_t len;
|
|
|
|
f.Read( len );
|
|
|
|
auto key = m_slab.Alloc<char>( len+1 );
|
|
|
|
f.Read( key, len );
|
|
|
|
key[len] = '\0';
|
|
|
|
f.Read( len );
|
|
|
|
auto data = (char*)m_slab.AllocBig( len );
|
|
|
|
f.Read( data, len );
|
|
|
|
m_data.sourceFileCache.emplace( key, MemoryBlock { data, len } );
|
2020-05-23 13:43:42 +00:00
|
|
|
}
|
2022-04-18 11:59:48 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
for( uint64_t i=0; i<sz; i++ )
|
2020-05-23 13:43:42 +00:00
|
|
|
{
|
2022-04-18 11:59:48 +00:00
|
|
|
uint32_t s32;
|
|
|
|
f.Read( s32 );
|
|
|
|
f.Skip( s32 );
|
|
|
|
f.Read( s32 );
|
|
|
|
f.Skip( s32 );
|
2020-05-23 13:43:42 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-14 00:17:37 +00:00
|
|
|
s_loadProgress.total.store( 0, std::memory_order_relaxed );
|
|
|
|
m_loadTime = std::chrono::duration_cast<std::chrono::nanoseconds>( std::chrono::high_resolution_clock::now() - loadStart ).count();
|
|
|
|
|
2019-09-29 18:52:25 +00:00
|
|
|
if( !bgTasks )
|
|
|
|
{
|
|
|
|
m_backgroundDone.store( true, std::memory_order_relaxed );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
m_backgroundDone.store( false, std::memory_order_relaxed );
|
2019-02-14 00:17:37 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
2021-11-13 02:23:43 +00:00
|
|
|
if( fileVer < FileVersion( 0, 7, 13 ) )
|
|
|
|
{
|
|
|
|
for( auto& t : m_data.threads )
|
|
|
|
{
|
|
|
|
pdqsort_branchless( t->samples.begin(), t->samples.end(), [] ( const auto& lhs, const auto& rhs ) { return lhs.time.Val() < rhs.time.Val(); } );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-09-25 14:39:00 +00:00
|
|
|
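// The remaining statistics are reconstructed on a background thread; m_backgroundDone is set once all jobs have joined.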
m_threadBackground = std::thread( [this, eventMask] {
|
2020-03-18 01:02:37 +00:00
|
|
|
std::vector<std::thread> jobs;
|
|
|
|
|
2021-11-06 19:49:15 +00:00
|
|
|
if( !m_data.ctxSwitch.empty() && m_data.cpuDataCount != 0 )
|
2020-02-10 21:29:54 +00:00
|
|
|
{
|
2020-03-18 01:02:37 +00:00
|
|
|
jobs.emplace_back( std::thread( [this] { ReconstructContextSwitchUsage(); } ) );
|
2020-02-10 21:29:54 +00:00
|
|
|
}
|
|
|
|
|
2020-09-25 14:39:00 +00:00
|
|
|
for( auto& mem : m_data.memNameMap )
|
2020-02-10 21:29:54 +00:00
|
|
|
{
|
2020-09-25 14:39:00 +00:00
|
|
|
if( mem.second->reconstruct ) jobs.emplace_back( std::thread( [this, mem = mem.second] { ReconstructMemAllocPlot( *mem ); } ) );
|
2020-02-10 21:29:54 +00:00
|
|
|
}
|
|
|
|
|
2021-11-14 15:42:11 +00:00
|
|
|
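// Recursively walk each thread's zone tree; countMap records how many enclosing zones share a given source location while its subtree is processed.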
std::function<void(uint8_t*, Vector<short_ptr<ZoneEvent>>&, uint16_t)> ProcessTimeline;
|
|
|
|
ProcessTimeline = [this, &ProcessTimeline] ( uint8_t* countMap, Vector<short_ptr<ZoneEvent>>& _vec, uint16_t thread )
|
2019-03-13 00:46:05 +00:00
|
|
|
{
|
2019-10-31 20:41:21 +00:00
|
|
|
if( m_shutdown.load( std::memory_order_relaxed ) ) return;
|
2019-11-09 22:39:28 +00:00
|
|
|
assert( _vec.is_magic() );
|
|
|
|
auto& vec = *(Vector<ZoneEvent>*)( &_vec );
|
2019-09-29 18:52:25 +00:00
|
|
|
for( auto& zone : vec )
|
2019-03-13 00:46:05 +00:00
|
|
|
{
|
2021-06-05 18:13:57 +00:00
|
|
|
if( zone.IsEndValid() ) ReconstructZoneStatistics( countMap, zone, thread );
|
|
|
|
if( zone.HasChildren() )
|
|
|
|
{
|
2021-11-14 15:42:11 +00:00
|
|
|
countMap[uint16_t(zone.SrcLoc())]++;
|
2021-06-05 18:13:57 +00:00
|
|
|
ProcessTimeline( countMap, GetZoneChildrenMutable( zone.Child() ), thread );
|
2021-11-14 15:42:11 +00:00
|
|
|
countMap[uint16_t(zone.SrcLoc())]--;
|
2021-06-05 18:13:57 +00:00
|
|
|
}
|
2019-03-13 00:46:05 +00:00
|
|
|
}
|
2019-09-29 18:52:25 +00:00
|
|
|
};
|
2019-03-13 00:46:05 +00:00
|
|
|
|
2020-03-18 01:02:37 +00:00
|
|
|
jobs.emplace_back( std::thread( [this, ProcessTimeline] {
|
|
|
|
for( auto& t : m_data.threads )
|
2019-09-29 18:52:25 +00:00
|
|
|
{
|
2020-03-18 01:02:37 +00:00
|
|
|
if( m_shutdown.load( std::memory_order_relaxed ) ) return;
|
|
|
|
if( !t->timeline.empty() )
|
|
|
|
{
|
2021-11-14 15:42:11 +00:00
|
|
|
uint8_t countMap[64*1024];
|
2020-03-18 01:02:37 +00:00
|
|
|
// Don't touch the thread compression cache from a worker thread.
|
2021-06-05 18:13:57 +00:00
|
|
|
ProcessTimeline( countMap, t->timeline, m_data.localThreadCompress.DecompressMustRaw( t->id ) );
|
2020-03-18 01:02:37 +00:00
|
|
|
}
|
2019-09-29 18:52:25 +00:00
|
|
|
}
|
2021-11-14 15:05:55 +00:00
|
|
|
std::lock_guard<std::mutex> lock( m_data.lock );
|
|
|
|
m_data.sourceLocationZonesReady = true;
|
2020-03-18 01:02:37 +00:00
|
|
|
} ) );
|
2020-02-10 21:29:54 +00:00
|
|
|
|
2022-01-28 23:48:01 +00:00
|
|
|
std::function<void(Vector<short_ptr<GpuEvent>>&, uint16_t)> ProcessTimelineGpu;
|
|
|
|
ProcessTimelineGpu = [this, &ProcessTimelineGpu] ( Vector<short_ptr<GpuEvent>>& _vec, uint16_t thread )
|
|
|
|
{
|
|
|
|
if( m_shutdown.load( std::memory_order_relaxed ) ) return;
|
|
|
|
assert( _vec.is_magic() );
|
|
|
|
auto& vec = *(Vector<GpuEvent>*)( &_vec );
|
|
|
|
for( auto& zone : vec )
|
|
|
|
{
|
|
|
|
if( zone.GpuEnd() >= 0 ) ReconstructZoneStatistics( zone, thread );
|
|
|
|
if( zone.Child() >= 0 )
|
|
|
|
{
|
|
|
|
ProcessTimelineGpu( GetGpuChildrenMutable( zone.Child() ), thread );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
jobs.emplace_back( std::thread( [this, ProcessTimelineGpu] {
|
|
|
|
for( auto& t : m_data.gpuData )
|
|
|
|
{
|
|
|
|
for( auto& td : t->threadData )
|
|
|
|
{
|
|
|
|
if( m_shutdown.load( std::memory_order_relaxed ) ) return;
|
|
|
|
if( !td.second.timeline.empty() )
|
|
|
|
{
|
|
|
|
ProcessTimelineGpu( td.second.timeline, td.first );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
std::lock_guard<std::mutex> lock( m_data.lock );
|
|
|
|
m_data.gpuSourceLocationZonesReady = true;
|
|
|
|
} ) );
|
|
|
|
|
2020-02-27 00:22:36 +00:00
|
|
|
if( eventMask & EventType::Samples )
|
|
|
|
{
|
2020-03-18 01:02:37 +00:00
|
|
|
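// Tally callstack sample counts (context switch samples excluded) and build per-symbol instruction pointer statistics.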
jobs.emplace_back( std::thread( [this] {
|
|
|
|
unordered_flat_map<uint32_t, uint32_t> counts;
|
|
|
|
uint32_t total = 0;
|
|
|
|
for( auto& t : m_data.threads ) total += t->samples.size();
|
|
|
|
if( total != 0 )
|
2020-02-27 00:22:36 +00:00
|
|
|
{
|
2020-03-18 01:02:37 +00:00
|
|
|
for( auto& t : m_data.threads )
|
2020-02-27 00:22:36 +00:00
|
|
|
{
|
2020-03-18 01:02:37 +00:00
|
|
|
if( m_shutdown.load( std::memory_order_relaxed ) ) return;
|
2021-12-21 14:24:11 +00:00
|
|
|
auto cit = t->ctxSwitchSamples.begin();
|
2020-03-18 01:02:37 +00:00
|
|
|
for( auto& sd : t->samples )
|
2020-02-27 00:22:36 +00:00
|
|
|
{
|
2021-11-13 02:15:20 +00:00
|
|
|
bool isCtxSwitch = false;
|
2021-12-21 14:24:11 +00:00
|
|
|
if( cit != t->ctxSwitchSamples.end() )
|
2020-03-18 01:02:37 +00:00
|
|
|
{
|
2021-12-21 14:24:11 +00:00
|
|
|
const auto sdt = sd.time.Val();
|
|
|
|
cit = std::lower_bound( cit, t->ctxSwitchSamples.end(), sdt, []( const auto& l, const auto& r ) { return (uint64_t)l.time.Val() < (uint64_t)r; } );
|
|
|
|
isCtxSwitch = cit != t->ctxSwitchSamples.end() && cit->time.Val() == sdt;
|
2020-03-18 01:02:37 +00:00
|
|
|
}
|
2021-11-13 02:15:20 +00:00
|
|
|
if( !isCtxSwitch )
|
2020-03-25 00:55:43 +00:00
|
|
|
{
|
2021-11-13 02:15:20 +00:00
|
|
|
const auto cs = sd.callstack.Val();
|
|
|
|
auto it = counts.find( cs );
|
|
|
|
if( it == counts.end() )
|
2020-03-25 00:55:43 +00:00
|
|
|
{
|
2021-11-13 02:15:20 +00:00
|
|
|
counts.emplace( cs, 1 );
|
2020-03-25 00:55:43 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2021-11-13 02:15:20 +00:00
|
|
|
it->second++;
|
|
|
|
}
|
|
|
|
|
|
|
|
const auto& callstack = GetCallstack( cs );
|
|
|
|
auto& ip = callstack[0];
|
|
|
|
auto frame = GetCallstackFrame( ip );
|
|
|
|
if( frame )
|
|
|
|
{
|
|
|
|
const auto symAddr = frame->data[0].symAddr;
|
|
|
|
auto it = m_data.instructionPointersMap.find( symAddr );
|
|
|
|
if( it == m_data.instructionPointersMap.end() )
|
2020-03-25 00:55:43 +00:00
|
|
|
{
|
2021-11-13 02:15:20 +00:00
|
|
|
m_data.instructionPointersMap.emplace( symAddr, unordered_flat_map<CallstackFrameId, uint32_t, CallstackFrameIdHash, CallstackFrameIdCompare> { { ip, 1 } } );
|
2020-03-25 00:55:43 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2021-11-13 02:15:20 +00:00
|
|
|
auto fit = it->second.find( ip );
|
|
|
|
if( fit == it->second.end() )
|
|
|
|
{
|
|
|
|
it->second.emplace( ip, 1 );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
fit->second++;
|
|
|
|
}
|
2020-03-25 00:55:43 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2020-02-27 00:22:36 +00:00
|
|
|
}
|
|
|
|
}
|
2020-03-18 01:02:37 +00:00
|
|
|
for( auto& v : counts ) UpdateSampleStatistics( v.first, v.second, false );
|
2020-02-27 00:22:36 +00:00
|
|
|
}
|
2021-02-07 17:11:24 +00:00
|
|
|
std::lock_guard<std::mutex> lock( m_data.lock );
|
2020-03-10 20:06:38 +00:00
|
|
|
m_data.callstackSamplesReady = true;
|
2020-03-18 01:02:37 +00:00
|
|
|
} ) );
|
2020-03-10 20:06:38 +00:00
|
|
|
|
2020-03-18 01:02:37 +00:00
|
|
|
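// Build ghost zones from the sampled callstacks; samples with a zero timestamp are skipped.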
jobs.emplace_back( std::thread( [this] {
|
|
|
|
uint32_t gcnt = 0;
|
|
|
|
for( auto& t : m_data.threads )
|
2020-03-10 20:06:38 +00:00
|
|
|
{
|
2020-03-18 01:02:37 +00:00
|
|
|
if( m_shutdown.load( std::memory_order_relaxed ) ) return;
|
2021-12-02 01:06:05 +00:00
|
|
|
if( !t->samples.empty() )
|
2020-03-10 20:06:38 +00:00
|
|
|
{
|
2021-12-02 01:06:05 +00:00
|
|
|
if( t->samples[0].time.Val() != 0 )
|
|
|
|
{
|
|
|
|
for( auto& sd : t->samples )
|
|
|
|
{
|
|
|
|
gcnt += AddGhostZone( GetCallstack( sd.callstack.Val() ), &t->ghostZones, sd.time.Val() );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
for( auto& sd : t->samples )
|
|
|
|
{
|
|
|
|
const auto st = sd.time.Val();
|
|
|
|
if( st != 0 ) gcnt += AddGhostZone( GetCallstack( sd.callstack.Val() ), &t->ghostZones, st );
|
|
|
|
}
|
|
|
|
}
|
2020-03-10 20:06:38 +00:00
|
|
|
}
|
|
|
|
}
|
2021-02-07 17:11:24 +00:00
|
|
|
std::lock_guard<std::mutex> lock( m_data.lock );
|
2020-03-18 01:02:37 +00:00
|
|
|
m_data.ghostZonesReady = true;
|
|
|
|
m_data.ghostCnt = gcnt;
|
|
|
|
} ) );
|
2020-08-07 17:05:04 +00:00
|
|
|
|
|
|
|
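// Group samples by the symbol of their topmost frame and record, for every callstack level, which child address was executing below it.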
jobs.emplace_back( std::thread( [this] {
|
|
|
|
for( auto& t : m_data.threads )
|
|
|
|
{
|
2021-09-04 14:11:41 +00:00
|
|
|
uint16_t tid = CompressThread( t->id );
|
2020-08-07 17:05:04 +00:00
|
|
|
for( auto& v : t->samples )
|
|
|
|
{
|
|
|
|
const auto& time = v.time;
|
|
|
|
const auto cs = v.callstack.Val();
|
|
|
|
const auto& callstack = GetCallstack( cs );
|
|
|
|
auto& ip = callstack[0];
|
|
|
|
auto frame = GetCallstackFrame( ip );
|
|
|
|
if( frame )
|
|
|
|
{
|
|
|
|
const auto symAddr = frame->data[0].symAddr;
|
|
|
|
auto it = m_data.symbolSamples.find( symAddr );
|
|
|
|
if( it == m_data.symbolSamples.end() )
|
|
|
|
{
|
2021-09-04 14:11:41 +00:00
|
|
|
m_data.symbolSamples.emplace( symAddr, Vector<SampleDataRange>( SampleDataRange { time, tid, ip } ) );
|
2020-08-07 17:05:04 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2021-09-04 14:11:41 +00:00
|
|
|
it->second.push_back_non_empty( SampleDataRange { time, tid, ip } );
|
2020-08-07 17:05:04 +00:00
|
|
|
}
|
|
|
|
}
|
2021-11-27 01:03:59 +00:00
|
|
|
auto childAddr = GetCanonicalPointer( callstack[0] );
|
2021-04-09 18:35:17 +00:00
|
|
|
for( uint16_t i=1; i<callstack.size(); i++ )
|
|
|
|
{
|
2021-04-18 18:35:59 +00:00
|
|
|
auto addr = GetCanonicalPointer( callstack[i] );
|
|
|
|
auto it = m_data.childSamples.find( addr );
|
2021-04-09 18:35:17 +00:00
|
|
|
if( it == m_data.childSamples.end() )
|
|
|
|
{
|
2021-11-27 01:03:59 +00:00
|
|
|
m_data.childSamples.emplace( addr, Vector<ChildSample>( ChildSample { time, childAddr } ) );
|
2021-04-09 18:35:17 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2021-11-27 01:03:59 +00:00
|
|
|
it->second.push_back_non_empty( ChildSample { time, childAddr } );
|
2021-04-09 18:35:17 +00:00
|
|
|
}
|
2021-11-27 01:03:59 +00:00
|
|
|
childAddr = addr;
|
2021-04-09 18:35:17 +00:00
|
|
|
}
|
2020-08-07 17:05:04 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
for( auto& v : m_data.symbolSamples )
|
|
|
|
{
|
2020-08-10 12:52:51 +00:00
|
|
|
pdqsort_branchless( v.second.begin(), v.second.end(), []( const auto& lhs, const auto& rhs ) { return lhs.time.Val() < rhs.time.Val(); } );
|
2020-08-07 17:05:04 +00:00
|
|
|
}
|
2021-04-09 18:35:17 +00:00
|
|
|
for( auto& v : m_data.childSamples )
|
|
|
|
{
|
2021-11-27 01:03:59 +00:00
|
|
|
pdqsort_branchless( v.second.begin(), v.second.end(), []( const auto& lhs, const auto& rhs ) { return lhs.time.Val() < rhs.time.Val(); } );
|
2021-04-09 18:35:17 +00:00
|
|
|
}
|
2021-02-07 17:11:24 +00:00
|
|
|
std::lock_guard<std::mutex> lock( m_data.lock );
|
2020-08-07 17:05:04 +00:00
|
|
|
m_data.symbolSamplesReady = true;
|
|
|
|
} ) );
|
2020-03-18 01:02:37 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
for( auto& job : jobs ) job.join();
|
2019-09-29 18:52:25 +00:00
|
|
|
m_backgroundDone.store( true, std::memory_order_relaxed );
|
|
|
|
} );
|
|
|
|
#else
|
2019-11-10 23:19:38 +00:00
|
|
|
m_backgroundDone.store( true, std::memory_order_relaxed );
|
2019-02-14 00:17:37 +00:00
|
|
|
#endif
|
2019-09-29 18:52:25 +00:00
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
Worker::~Worker()
|
|
|
|
{
|
2018-02-15 12:19:20 +00:00
|
|
|
Shutdown();
|
2018-04-29 11:40:04 +00:00
|
|
|
|
2019-10-28 21:45:10 +00:00
|
|
|
if( m_threadNet.joinable() ) m_threadNet.join();
|
2018-04-29 11:40:04 +00:00
|
|
|
if( m_thread.joinable() ) m_thread.join();
|
2019-02-14 00:17:37 +00:00
|
|
|
if( m_threadBackground.joinable() ) m_threadBackground.join();
|
2018-04-29 11:40:04 +00:00
|
|
|
|
2018-04-21 18:11:59 +00:00
|
|
|
delete[] m_buffer;
|
2019-11-06 00:25:38 +00:00
|
|
|
LZ4_freeStreamDecode( (LZ4_streamDecode_t*)m_stream );
|
2018-04-21 18:12:16 +00:00
|
|
|
|
2019-06-08 10:17:18 +00:00
|
|
|
delete[] m_frameImageBuffer;
|
2020-05-24 14:17:54 +00:00
|
|
|
delete[] m_tmpBuf;
|
2019-06-08 10:17:18 +00:00
|
|
|
|
2018-04-21 18:34:29 +00:00
|
|
|
for( auto& v : m_data.threads )
|
|
|
|
{
|
2018-07-22 19:01:45 +00:00
|
|
|
v->timeline.~Vector();
|
|
|
|
v->stack.~Vector();
|
2018-04-21 18:36:33 +00:00
|
|
|
v->messages.~Vector();
|
2019-06-09 15:56:41 +00:00
|
|
|
v->zoneIdStack.~Vector();
|
2020-03-10 20:46:24 +00:00
|
|
|
v->samples.~Vector();
|
2019-10-26 21:11:48 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
v->childTimeStack.~Vector();
|
2020-03-10 20:06:38 +00:00
|
|
|
v->ghostZones.~Vector();
|
2019-10-26 21:11:48 +00:00
|
|
|
#endif
|
2018-04-21 18:34:29 +00:00
|
|
|
}
|
2018-07-22 19:01:45 +00:00
|
|
|
for( auto& v : m_data.gpuData )
|
|
|
|
{
|
2019-09-23 15:27:49 +00:00
|
|
|
for( auto& vt : v->threadData )
|
|
|
|
{
|
|
|
|
vt.second.timeline.~Vector();
|
|
|
|
vt.second.stack.~Vector();
|
|
|
|
}
|
2018-07-22 19:01:45 +00:00
|
|
|
}
|
2018-08-04 14:33:03 +00:00
|
|
|
for( auto& v : m_data.plots.Data() )
|
2018-04-21 18:12:16 +00:00
|
|
|
{
|
|
|
|
v->~PlotData();
|
|
|
|
}
|
2018-08-04 17:47:09 +00:00
|
|
|
for( auto& v : m_data.frames.Data() )
|
|
|
|
{
|
|
|
|
v->~FrameData();
|
|
|
|
}
|
2019-03-16 01:09:50 +00:00
|
|
|
for( auto& v : m_data.lockMap )
|
|
|
|
{
|
|
|
|
v.second->~LockMap();
|
|
|
|
}
|
2020-04-14 00:11:02 +00:00
|
|
|
for( auto& v : m_data.zoneChildren )
|
|
|
|
{
|
|
|
|
v.~Vector();
|
|
|
|
}
|
2020-04-14 00:34:28 +00:00
|
|
|
for( auto& v : m_data.ctxSwitch )
|
|
|
|
{
|
|
|
|
v.second->v.~Vector();
|
|
|
|
}
|
2020-04-14 00:11:02 +00:00
|
|
|
for( auto& v : m_data.gpuChildren )
|
|
|
|
{
|
|
|
|
v.~Vector();
|
|
|
|
}
|
2020-03-10 20:06:38 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
for( auto& v : m_data.ghostChildren )
|
|
|
|
{
|
|
|
|
v.~Vector();
|
|
|
|
}
|
|
|
|
#endif
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2018-08-08 17:21:53 +00:00
|
|
|
uint64_t Worker::GetLockCount() const
|
|
|
|
{
|
|
|
|
uint64_t cnt = 0;
|
|
|
|
for( auto& l : m_data.lockMap )
|
|
|
|
{
|
2019-03-16 01:09:50 +00:00
|
|
|
cnt += l.second->timeline.size();
|
2018-08-08 17:21:53 +00:00
|
|
|
}
|
|
|
|
return cnt;
|
|
|
|
}
|
|
|
|
|
|
|
|
uint64_t Worker::GetPlotCount() const
|
|
|
|
{
|
|
|
|
uint64_t cnt = 0;
|
|
|
|
for( auto& p : m_data.plots.Data() )
|
|
|
|
{
|
2020-02-05 22:41:53 +00:00
|
|
|
if( p->type == PlotType::User )
|
|
|
|
{
|
|
|
|
cnt += p->data.size();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return cnt;
|
|
|
|
}
|
|
|
|
|
|
|
|
uint64_t Worker::GetTracyPlotCount() const
|
|
|
|
{
|
|
|
|
uint64_t cnt = 0;
|
|
|
|
for( auto& p : m_data.plots.Data() )
|
|
|
|
{
|
|
|
|
if( p->type != PlotType::User )
|
2018-08-08 17:21:53 +00:00
|
|
|
{
|
|
|
|
cnt += p->data.size();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return cnt;
|
|
|
|
}
|
|
|
|
|
2019-08-12 22:29:09 +00:00
|
|
|
uint64_t Worker::GetContextSwitchCount() const
|
|
|
|
{
|
|
|
|
uint64_t cnt = 0;
|
|
|
|
for( auto& v : m_data.ctxSwitch )
|
|
|
|
{
|
|
|
|
cnt += v.second->v.size();
|
|
|
|
}
|
|
|
|
return cnt;
|
|
|
|
}
|
|
|
|
|
2019-08-16 14:36:33 +00:00
|
|
|
uint64_t Worker::GetContextSwitchPerCpuCount() const
|
|
|
|
{
|
|
|
|
uint64_t cnt = 0;
|
2019-10-15 14:13:36 +00:00
|
|
|
for( int i=0; i<m_data.cpuDataCount; i++ )
|
2019-08-16 14:36:33 +00:00
|
|
|
{
|
|
|
|
cnt += m_data.cpuData[i].cs.size();
|
|
|
|
}
|
|
|
|
return cnt;
|
|
|
|
}
|
|
|
|
|
2021-04-18 20:59:10 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
2021-04-18 13:03:42 +00:00
|
|
|
uint64_t Worker::GetChildSamplesCountFull() const
|
|
|
|
{
|
|
|
|
uint64_t cnt = 0;
|
|
|
|
for( auto& v : m_data.childSamples )
|
|
|
|
{
|
|
|
|
cnt += v.second.size();
|
|
|
|
}
|
|
|
|
return cnt;
|
|
|
|
}
|
2021-11-13 01:47:39 +00:00
|
|
|
|
|
|
|
uint64_t Worker::GetContextSwitchSampleCount() const
|
|
|
|
{
|
|
|
|
uint64_t cnt = 0;
|
|
|
|
for( auto& v : m_data.threads )
|
|
|
|
{
|
|
|
|
cnt += v->ctxSwitchSamples.size();
|
|
|
|
}
|
|
|
|
return cnt;
|
|
|
|
}
|
2021-04-18 20:59:10 +00:00
|
|
|
#endif
|
2021-04-18 13:03:42 +00:00
|
|
|
|
2019-08-17 23:51:02 +00:00
|
|
|
uint64_t Worker::GetPidFromTid( uint64_t tid ) const
|
|
|
|
{
|
|
|
|
auto it = m_data.tidToPid.find( tid );
|
|
|
|
if( it == m_data.tidToPid.end() ) return 0;
|
|
|
|
return it->second;
|
|
|
|
}
|
|
|
|
|
2021-06-20 12:18:59 +00:00
|
|
|
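// For each time step, report how many cores were running threads of the profiled process (first) and how many were running other work (second).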
void Worker::GetCpuUsage( int64_t t0, double tstep, size_t num, std::vector<std::pair<int, int>>& out )
|
2019-10-15 14:54:43 +00:00
|
|
|
{
|
2021-06-20 12:18:59 +00:00
|
|
|
if( out.size() < num ) out.resize( num );
|
2019-11-05 00:41:27 +00:00
|
|
|
|
2021-06-20 12:34:47 +00:00
|
|
|
if( t0 > m_data.lastTime || int64_t( t0 + tstep * num ) < 0 )
|
|
|
|
{
|
|
|
|
memset( out.data(), 0, sizeof( int ) * 2 * num );
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2021-06-20 12:22:14 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
if( !m_data.ctxUsage.empty() )
|
2019-10-15 14:54:43 +00:00
|
|
|
{
|
2021-06-20 12:30:43 +00:00
|
|
|
auto ptr = out.data();
|
2021-06-20 12:24:42 +00:00
|
|
|
auto itBegin = m_data.ctxUsage.begin();
|
2021-06-20 12:22:14 +00:00
|
|
|
for( size_t i=0; i<num; i++ )
|
2019-10-15 14:54:43 +00:00
|
|
|
{
|
2021-06-20 12:22:14 +00:00
|
|
|
const auto time = int64_t( t0 + tstep * i );
|
|
|
|
if( time < 0 || time > m_data.lastTime )
|
|
|
|
{
|
|
|
|
ptr->first = 0;
|
|
|
|
ptr->second = 0;
|
|
|
|
}
|
|
|
|
else
|
2019-10-15 14:54:43 +00:00
|
|
|
{
|
2021-06-20 12:18:59 +00:00
|
|
|
const auto test = ( time << 16 ) | 0xFFFF;
|
2021-06-20 12:24:42 +00:00
|
|
|
auto it = std::upper_bound( itBegin, m_data.ctxUsage.end(), test, [] ( const auto& l, const auto& r ) { return l < r._time_other_own; } );
|
2021-06-20 12:18:59 +00:00
|
|
|
if( it == m_data.ctxUsage.begin() || it == m_data.ctxUsage.end() )
|
2019-10-15 14:54:43 +00:00
|
|
|
{
|
2021-06-20 12:18:59 +00:00
|
|
|
ptr->first = 0;
|
|
|
|
ptr->second = 0;
|
2019-10-15 14:54:43 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2021-06-20 12:18:59 +00:00
|
|
|
--it;
|
|
|
|
ptr->first = it->Own();
|
|
|
|
ptr->second = it->Other();
|
|
|
|
}
|
2021-06-20 12:24:42 +00:00
|
|
|
itBegin = it;
|
2021-06-20 12:18:59 +00:00
|
|
|
}
|
2021-06-20 12:22:14 +00:00
|
|
|
ptr++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
2021-06-20 12:18:59 +00:00
|
|
|
#endif
|
2021-06-20 12:22:14 +00:00
|
|
|
{
|
2021-06-20 12:30:43 +00:00
|
|
|
memset( out.data(), 0, sizeof( int ) * 2 * num );
|
|
|
|
for( int i=0; i<m_data.cpuDataCount; i++ )
|
2021-06-20 12:22:14 +00:00
|
|
|
{
|
2021-06-20 12:30:43 +00:00
|
|
|
auto& cs = m_data.cpuData[i].cs;
|
|
|
|
if( !cs.empty() )
|
2021-06-20 12:18:59 +00:00
|
|
|
{
|
2021-06-20 12:37:56 +00:00
|
|
|
auto itBegin = cs.begin();
|
2021-06-20 12:30:43 +00:00
|
|
|
auto ptr = out.data();
|
|
|
|
for( size_t i=0; i<num; i++ )
|
2021-06-20 12:18:59 +00:00
|
|
|
{
|
2021-06-20 12:30:43 +00:00
|
|
|
const auto time = int64_t( t0 + tstep * i );
|
2021-06-20 12:33:08 +00:00
|
|
|
if( time > m_data.lastTime ) break;
|
|
|
|
if( time >= 0 )
|
2021-06-20 12:18:59 +00:00
|
|
|
{
|
2021-06-20 12:37:56 +00:00
|
|
|
auto it = std::lower_bound( itBegin, cs.end(), time, [] ( const auto& l, const auto& r ) { return (uint64_t)l.End() < (uint64_t)r; } );
|
2021-06-20 12:33:08 +00:00
|
|
|
if( it == cs.end() ) break;
|
|
|
|
if( it->IsEndValid() && it->Start() <= time )
|
2021-06-20 12:18:59 +00:00
|
|
|
{
|
|
|
|
if( GetPidFromTid( DecompressThreadExternal( it->Thread() ) ) == m_pid )
|
|
|
|
{
|
2021-06-20 12:30:43 +00:00
|
|
|
ptr->first++;
|
2021-06-20 12:18:59 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2021-06-20 12:30:43 +00:00
|
|
|
ptr->second++;
|
2021-06-20 12:18:59 +00:00
|
|
|
}
|
|
|
|
}
|
2021-06-20 12:37:56 +00:00
|
|
|
itBegin = it;
|
2021-06-20 12:18:59 +00:00
|
|
|
}
|
2021-06-20 12:30:43 +00:00
|
|
|
ptr++;
|
2019-10-15 14:54:43 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-08-14 18:16:11 +00:00
|
|
|
const ContextSwitch* const Worker::GetContextSwitchDataImpl( uint64_t thread )
|
2019-08-12 22:20:56 +00:00
|
|
|
{
|
|
|
|
auto it = m_data.ctxSwitch.find( thread );
|
|
|
|
if( it != m_data.ctxSwitch.end() )
|
|
|
|
{
|
2019-08-14 18:16:11 +00:00
|
|
|
m_data.ctxSwitchLast.first = thread;
|
|
|
|
m_data.ctxSwitchLast.second = it->second;
|
2019-08-12 22:20:56 +00:00
|
|
|
return it->second;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return nullptr;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-09-01 10:38:12 +00:00
|
|
|
size_t Worker::GetFullFrameCount( const FrameData& fd ) const
|
|
|
|
{
|
|
|
|
const auto sz = fd.frames.size();
|
|
|
|
assert( sz != 0 );
|
|
|
|
|
|
|
|
if( fd.continuous )
|
|
|
|
{
|
|
|
|
if( IsConnected() )
|
|
|
|
{
|
|
|
|
return sz - 1;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return sz;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
const auto& last = fd.frames.back();
|
|
|
|
if( last.end >= 0 )
|
|
|
|
{
|
|
|
|
return sz;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return sz - 1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-08-04 17:47:09 +00:00
|
|
|
int64_t Worker::GetFrameTime( const FrameData& fd, size_t idx ) const
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-08-05 00:09:59 +00:00
|
|
|
if( fd.continuous )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-08-05 00:09:59 +00:00
|
|
|
if( idx < fd.frames.size() - 1 )
|
|
|
|
{
|
|
|
|
return fd.frames[idx+1].start - fd.frames[idx].start;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
assert( m_data.lastTime != 0 );
|
|
|
|
return m_data.lastTime - fd.frames.back().start;
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2019-01-29 21:10:14 +00:00
|
|
|
const auto& frame = fd.frames[idx];
|
|
|
|
if( frame.end >= 0 )
|
2018-08-05 00:09:59 +00:00
|
|
|
{
|
2019-01-29 21:10:14 +00:00
|
|
|
return frame.end - frame.start;
|
2018-08-05 00:09:59 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return m_data.lastTime - fd.frames.back().start;
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-08-04 17:47:09 +00:00
|
|
|
int64_t Worker::GetFrameBegin( const FrameData& fd, size_t idx ) const
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-08-04 17:47:09 +00:00
|
|
|
assert( idx < fd.frames.size() );
|
2018-08-05 00:09:59 +00:00
|
|
|
return fd.frames[idx].start;
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2018-08-04 17:47:09 +00:00
|
|
|
int64_t Worker::GetFrameEnd( const FrameData& fd, size_t idx ) const
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-08-05 00:09:59 +00:00
|
|
|
if( fd.continuous )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-08-05 00:09:59 +00:00
|
|
|
if( idx < fd.frames.size() - 1 )
|
|
|
|
{
|
|
|
|
return fd.frames[idx+1].start;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return m_data.lastTime;
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2018-08-05 00:09:59 +00:00
|
|
|
if( fd.frames[idx].end >= 0 )
|
|
|
|
{
|
|
|
|
return fd.frames[idx].end;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return m_data.lastTime;
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-06 19:48:01 +00:00
|
|
|
const FrameImage* Worker::GetFrameImage( const FrameData& fd, size_t idx ) const
|
|
|
|
{
|
|
|
|
assert( idx < fd.frames.size() );
|
|
|
|
const auto& v = fd.frames[idx].frameImage;
|
|
|
|
if( v < 0 ) return nullptr;
|
|
|
|
return m_data.frameImage[v];
|
|
|
|
}
|
|
|
|
|
2019-06-22 12:05:18 +00:00
|
|
|
std::pair<int, int> Worker::GetFrameRange( const FrameData& fd, int64_t from, int64_t to )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-08-05 00:09:59 +00:00
|
|
|
auto zitbegin = std::lower_bound( fd.frames.begin(), fd.frames.end(), from, [] ( const auto& lhs, const auto& rhs ) { return lhs.start < rhs; } );
|
2018-08-04 21:19:35 +00:00
|
|
|
if( zitbegin == fd.frames.end() ) zitbegin--;
|
|
|
|
|
2018-08-05 00:09:59 +00:00
|
|
|
const auto zitend = std::lower_bound( zitbegin, fd.frames.end(), to, [] ( const auto& lhs, const auto& rhs ) { return lhs.start < rhs; } );
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2018-08-04 17:47:09 +00:00
|
|
|
int zbegin = std::distance( fd.frames.begin(), zitbegin );
|
2018-08-05 00:09:59 +00:00
|
|
|
if( zbegin > 0 && zitbegin->start != from ) --zbegin;
|
2018-08-04 17:47:09 +00:00
|
|
|
const int zend = std::distance( fd.frames.begin(), zitend );
|
2018-02-13 13:57:47 +00:00
|
|
|
|
|
|
|
return std::make_pair( zbegin, zend );
|
|
|
|
}
|
|
|
|
|
2019-03-03 15:50:18 +00:00
|
|
|
const CallstackFrameData* Worker::GetCallstackFrame( const CallstackFrameId& ptr ) const
|
2018-06-19 23:18:59 +00:00
|
|
|
{
|
2020-02-29 18:49:33 +00:00
|
|
|
assert( ptr.custom == 0 );
|
2018-06-19 23:18:59 +00:00
|
|
|
auto it = m_data.callstackFrameMap.find( ptr );
|
|
|
|
if( it == m_data.callstackFrameMap.end() )
|
|
|
|
{
|
|
|
|
return nullptr;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return it->second;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-02-29 18:49:33 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
const CallstackFrameData* Worker::GetParentCallstackFrame( const CallstackFrameId& ptr ) const
|
|
|
|
{
|
|
|
|
assert( ptr.custom == 1 );
|
|
|
|
auto it = m_data.parentCallstackFrameMap.find( ptr );
|
|
|
|
if( it == m_data.parentCallstackFrameMap.end() )
|
|
|
|
{
|
|
|
|
return nullptr;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return it->second;
|
|
|
|
}
|
|
|
|
}
|
2020-08-07 17:14:21 +00:00
|
|
|
|
2020-08-10 12:52:51 +00:00
|
|
|
const Vector<SampleDataRange>* Worker::GetSamplesForSymbol( uint64_t symAddr ) const
|
2020-08-07 17:14:21 +00:00
|
|
|
{
|
|
|
|
assert( m_data.symbolSamplesReady );
|
|
|
|
auto it = m_data.symbolSamples.find( symAddr );
|
|
|
|
if( it == m_data.symbolSamples.end() ) return nullptr;
|
|
|
|
return &it->second;
|
|
|
|
}
|
2021-04-09 18:26:21 +00:00
|
|
|
|
2021-11-27 01:03:59 +00:00
|
|
|
const Vector<ChildSample>* Worker::GetChildSamples( uint64_t addr ) const
|
2021-04-09 18:26:21 +00:00
|
|
|
{
|
|
|
|
assert( m_data.symbolSamplesReady );
|
|
|
|
auto it = m_data.childSamples.find( addr );
|
|
|
|
if( it == m_data.childSamples.end() ) return nullptr;
|
|
|
|
return &it->second;
|
|
|
|
}
|
2020-02-29 18:49:33 +00:00
|
|
|
#endif
|
|
|
|
|
2020-02-26 21:46:02 +00:00
|
|
|
const SymbolData* Worker::GetSymbolData( uint64_t sym ) const
|
|
|
|
{
|
|
|
|
auto it = m_data.symbolMap.find( sym );
|
|
|
|
if( it == m_data.symbolMap.end() )
|
|
|
|
{
|
|
|
|
return nullptr;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return &it->second;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-08-13 14:24:09 +00:00
|
|
|
bool Worker::HasSymbolCode( uint64_t sym ) const
|
|
|
|
{
|
|
|
|
return m_data.symbolCode.find( sym ) != m_data.symbolCode.end();
|
|
|
|
}
|
|
|
|
|
2020-03-25 21:15:22 +00:00
|
|
|
const char* Worker::GetSymbolCode( uint64_t sym, uint32_t& len ) const
|
|
|
|
{
|
|
|
|
auto it = m_data.symbolCode.find( sym );
|
|
|
|
if( it == m_data.symbolCode.end() ) return nullptr;
|
|
|
|
len = it->second.len;
|
|
|
|
return it->second.data;
|
|
|
|
}
|
|
|
|
|
2021-11-14 12:05:05 +00:00
|
|
|
uint64_t Worker::GetSymbolForAddress( uint64_t address )
|
2020-04-08 14:55:49 +00:00
|
|
|
{
|
2021-11-14 12:05:05 +00:00
|
|
|
DoPostponedSymbols();
|
2020-04-08 14:55:49 +00:00
|
|
|
auto it = std::lower_bound( m_data.symbolLoc.begin(), m_data.symbolLoc.end(), address, [] ( const auto& l, const auto& r ) { return l.addr + l.len < r; } );
|
|
|
|
if( it == m_data.symbolLoc.end() || address < it->addr ) return 0;
|
|
|
|
return it->addr;
|
|
|
|
}
|
|
|
|
|
2021-11-14 12:05:05 +00:00
|
|
|
uint64_t Worker::GetSymbolForAddress( uint64_t address, uint32_t& offset )
|
2020-03-27 16:39:42 +00:00
|
|
|
{
|
2021-11-14 12:05:05 +00:00
|
|
|
DoPostponedSymbols();
|
2020-03-27 16:39:42 +00:00
|
|
|
auto it = std::lower_bound( m_data.symbolLoc.begin(), m_data.symbolLoc.end(), address, [] ( const auto& l, const auto& r ) { return l.addr + l.len < r; } );
|
|
|
|
if( it == m_data.symbolLoc.end() || address < it->addr ) return 0;
|
|
|
|
offset = address - it->addr;
|
|
|
|
return it->addr;
|
|
|
|
}
|
|
|
|
|
2021-06-19 17:13:34 +00:00
|
|
|
uint64_t Worker::GetInlineSymbolForAddress( uint64_t address ) const
|
|
|
|
{
|
|
|
|
auto it = m_data.codeSymbolMap.find( address );
|
|
|
|
if( it == m_data.codeSymbolMap.end() ) return 0;
|
|
|
|
return it->second;
|
|
|
|
}
|
|
|
|
|
2020-04-01 22:31:53 +00:00
|
|
|
StringIdx Worker::GetLocationForAddress( uint64_t address, uint32_t& line ) const
|
|
|
|
{
|
|
|
|
auto it = m_data.codeAddressToLocation.find( address );
|
|
|
|
if( it == m_data.codeAddressToLocation.end() )
|
|
|
|
{
|
|
|
|
line = 0;
|
|
|
|
return StringIdx();
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
const auto idx = UnpackFileLine( it->second, line );
|
|
|
|
return StringIdx( idx );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-04-02 00:17:22 +00:00
|
|
|
const Vector<uint64_t>* Worker::GetAddressesForLocation( uint32_t fileStringIdx, uint32_t line ) const
|
|
|
|
{
|
|
|
|
auto it = m_data.locationCodeAddressList.find( PackFileLine( fileStringIdx, line ) );
|
|
|
|
if( it == m_data.locationCodeAddressList.end() )
|
|
|
|
{
|
|
|
|
return nullptr;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return &it->second;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-11-14 12:05:05 +00:00
|
|
|
const uint64_t* Worker::GetInlineSymbolList( uint64_t sym, uint32_t len )
|
2020-04-08 13:34:14 +00:00
|
|
|
{
|
2021-11-14 12:05:05 +00:00
|
|
|
DoPostponedInlineSymbols();
|
2020-04-08 13:34:14 +00:00
|
|
|
auto it = std::lower_bound( m_data.symbolLocInline.begin(), m_data.symbolLocInline.end(), sym );
|
|
|
|
if( it == m_data.symbolLocInline.end() ) return nullptr;
|
|
|
|
if( *it >= sym + len ) return nullptr;
|
|
|
|
return it;
|
|
|
|
}
|
|
|
|
|
2018-03-23 00:50:38 +00:00
|
|
|
int64_t Worker::GetZoneEnd( const ZoneEvent& ev )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
auto ptr = &ev;
|
|
|
|
for(;;)
|
|
|
|
{
|
2020-01-22 21:25:04 +00:00
|
|
|
if( ptr->IsEndValid() ) return ptr->End();
|
2020-01-24 01:17:38 +00:00
|
|
|
if( !ptr->HasChildren() ) return ptr->Start();
|
2019-11-10 00:43:28 +00:00
|
|
|
auto& children = GetZoneChildren( ptr->Child() );
|
|
|
|
if( children.is_magic() )
|
|
|
|
{
|
|
|
|
auto& c = *(Vector<ZoneEvent>*)&children;
|
|
|
|
ptr = &c.back();
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
ptr = children.back();
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-03-23 00:50:38 +00:00
|
|
|
int64_t Worker::GetZoneEnd( const GpuEvent& ev )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
auto ptr = &ev;
|
|
|
|
for(;;)
|
|
|
|
{
|
2019-10-30 22:50:37 +00:00
|
|
|
if( ptr->GpuEnd() >= 0 ) return ptr->GpuEnd();
|
|
|
|
if( ptr->Child() < 0 ) return ptr->GpuStart() >= 0 ? ptr->GpuStart() : m_data.lastTime;
|
2019-11-10 00:43:28 +00:00
|
|
|
auto& children = GetGpuChildren( ptr->Child() );
|
|
|
|
if( children.is_magic() )
|
|
|
|
{
|
|
|
|
auto& c = *(Vector<GpuEvent>*)&children;
|
|
|
|
ptr = &c.back();
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
ptr = children.back();
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-04-02 00:08:00 +00:00
|
|
|
uint32_t Worker::FindStringIdx( const char* str ) const
|
|
|
|
{
|
2020-04-04 12:42:00 +00:00
|
|
|
if( !str ) return 0;
|
2020-04-02 00:08:00 +00:00
|
|
|
charutil::StringKey key = { str, strlen( str ) };
|
|
|
|
auto sit = m_data.stringMap.find( key );
|
|
|
|
if( sit == m_data.stringMap.end() )
|
|
|
|
{
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return sit->second;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-02-13 13:57:47 +00:00
|
|
|
const char* Worker::GetString( uint64_t ptr ) const
|
|
|
|
{
|
|
|
|
const auto it = m_data.strings.find( ptr );
|
|
|
|
if( it == m_data.strings.end() || it->second == nullptr )
|
|
|
|
{
|
|
|
|
return "???";
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return it->second;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
const char* Worker::GetString( const StringRef& ref ) const
|
|
|
|
{
|
|
|
|
if( ref.isidx )
|
|
|
|
{
|
|
|
|
assert( ref.active );
|
2018-03-04 16:52:51 +00:00
|
|
|
return m_data.stringData[ref.str];
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
if( ref.active )
|
|
|
|
{
|
2018-03-04 16:52:51 +00:00
|
|
|
return GetString( ref.str );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return "???";
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
const char* Worker::GetString( const StringIdx& idx ) const
|
|
|
|
{
|
2019-09-29 18:32:42 +00:00
|
|
|
assert( idx.Active() );
|
|
|
|
return m_data.stringData[idx.Idx()];
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2019-08-30 21:09:07 +00:00
|
|
|
static const char* BadExternalThreadNames[] = {
|
|
|
|
"ntdll.dll",
|
2020-02-23 10:39:51 +00:00
|
|
|
"???",
|
2019-08-30 21:09:07 +00:00
|
|
|
nullptr
|
|
|
|
};
|
|
|
|
|
2019-08-27 21:00:13 +00:00
|
|
|
const char* Worker::GetThreadName( uint64_t id ) const
|
|
|
|
{
|
|
|
|
const auto it = m_data.threadNames.find( id );
|
|
|
|
if( it == m_data.threadNames.end() )
|
|
|
|
{
|
|
|
|
const auto eit = m_data.externalNames.find( id );
|
|
|
|
if( eit == m_data.externalNames.end() )
|
|
|
|
{
|
|
|
|
return "???";
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return eit->second.second;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
// The client should send additional information about the thread name to make this check unnecessary
|
|
|
|
const auto txt = it->second;
|
2020-03-01 00:48:20 +00:00
|
|
|
if( txt[0] >= '0' && txt[0] <= '9' && (uint64_t)atoi( txt ) == id )
|
2019-08-27 21:00:13 +00:00
|
|
|
{
|
|
|
|
const auto eit = m_data.externalNames.find( id );
|
|
|
|
if( eit != m_data.externalNames.end() )
|
|
|
|
{
|
2019-08-30 21:09:07 +00:00
|
|
|
const char* ext = eit->second.second;
|
|
|
|
const char** ptr = BadExternalThreadNames;
|
|
|
|
while( *ptr )
|
|
|
|
{
|
|
|
|
if( strcmp( *ptr, ext ) == 0 ) return txt;
|
|
|
|
ptr++;
|
|
|
|
}
|
|
|
|
return ext;
|
2019-08-27 21:00:13 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
return txt;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-02-22 17:51:53 +00:00
|
|
|
bool Worker::IsThreadLocal( uint64_t id )
|
2019-08-16 15:59:25 +00:00
|
|
|
{
|
2020-03-30 21:41:21 +00:00
|
|
|
auto td = RetrieveThread( id );
|
2020-03-31 00:20:34 +00:00
|
|
|
return td && ( td->count > 0 || !td->samples.empty() );
|
2019-08-16 15:59:25 +00:00
|
|
|
}
|
|
|
|
|
2021-11-06 18:02:02 +00:00
|
|
|
bool Worker::IsThreadFiber( uint64_t id )
|
|
|
|
{
|
|
|
|
auto td = RetrieveThread( id );
|
|
|
|
return td && ( td->isFiber );
|
|
|
|
}
|
|
|
|
|
2019-08-15 15:42:26 +00:00
|
|
|
const SourceLocation& Worker::GetSourceLocation( int16_t srcloc ) const
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
if( srcloc < 0 )
|
|
|
|
{
|
|
|
|
return *m_data.sourceLocationPayload[-srcloc-1];
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
const auto it = m_data.sourceLocation.find( m_data.sourceLocationExpand[srcloc] );
|
|
|
|
assert( it != m_data.sourceLocation.end() );
|
|
|
|
return it->second;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|

std::pair<const char*, const char*> Worker::GetExternalName( uint64_t id ) const
{
    const auto it = m_data.externalNames.find( id );
    if( it == m_data.externalNames.end() )
    {
        return std::make_pair( "???", "???" );
    }
    else
    {
        return it->second;
    }
}

const char* Worker::GetZoneName( const SourceLocation& srcloc ) const
{
    if( srcloc.name.active )
    {
        return GetString( srcloc.name );
    }
    else
    {
        return GetString( srcloc.function );
    }
}

const char* Worker::GetZoneName( const ZoneEvent& ev ) const
{
    auto& srcloc = GetSourceLocation( ev.SrcLoc() );
    return GetZoneName( ev, srcloc );
}

const char* Worker::GetZoneName( const ZoneEvent& ev, const SourceLocation& srcloc ) const
{
    if( HasZoneExtra( ev ) && GetZoneExtra( ev ).name.Active() )
    {
        return GetString( GetZoneExtra( ev ).name );
    }
    else if( srcloc.name.active )
    {
        return GetString( srcloc.name );
    }
    else
    {
        return GetString( srcloc.function );
    }
}

const char* Worker::GetZoneName( const GpuEvent& ev ) const
{
    auto& srcloc = GetSourceLocation( ev.SrcLoc() );
    return GetZoneName( ev, srcloc );
}

const char* Worker::GetZoneName( const GpuEvent& ev, const SourceLocation& srcloc ) const
{
    if( srcloc.name.active )
    {
        return GetString( srcloc.name );
    }
    else
    {
        return GetString( srcloc.function );
    }
}
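// Name resolution order for the GetZoneName() overloads above: for CPU zones an explicit
// per-zone name stored in the ZoneExtra record wins, then the static source location name,
// then the function name. GPU zones carry no per-zone extra name and fall back straight to
// the source location. Illustrative use (hypothetical `worker` and `ev` variables, a sketch
// only):
//   const auto& srcloc = worker.GetSourceLocation( ev.SrcLoc() );
//   const char* label = worker.GetZoneName( ev, srcloc );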

static bool strstr_nocase( const char* l, const char* r )
{
    const auto lsz = strlen( l );
    const auto rsz = strlen( r );
    auto ll = (char*)alloca( lsz + 1 );
    auto rl = (char*)alloca( rsz + 1 );
    for( size_t i=0; i<lsz; i++ )
    {
        ll[i] = tolower( l[i] );
    }
    ll[lsz] = '\0';
    for( size_t i=0; i<rsz; i++ )
    {
        rl[i] = tolower( r[i] );
    }
    rl[rsz] = '\0';
    return strstr( ll, rl ) != nullptr;
}
2019-08-15 15:42:26 +00:00
|
|
|
std::vector<int16_t> Worker::GetMatchingSourceLocation( const char* query, bool ignoreCase ) const
|
2018-03-04 15:52:45 +00:00
|
|
|
{
|
2019-08-15 15:42:26 +00:00
|
|
|
std::vector<int16_t> match;
|
2018-03-04 15:52:45 +00:00
|
|
|
|
|
|
|
const auto sz = m_data.sourceLocationExpand.size();
|
2018-03-04 15:57:57 +00:00
|
|
|
for( size_t i=1; i<sz; i++ )
|
2018-03-04 15:52:45 +00:00
|
|
|
{
|
|
|
|
const auto it = m_data.sourceLocation.find( m_data.sourceLocationExpand[i] );
|
|
|
|
assert( it != m_data.sourceLocation.end() );
|
|
|
|
const auto& srcloc = it->second;
|
|
|
|
const auto str = GetString( srcloc.name.active ? srcloc.name : srcloc.function );
|
2018-12-18 15:52:29 +00:00
|
|
|
bool found = false;
|
|
|
|
if( ignoreCase )
|
|
|
|
{
|
|
|
|
found = strstr_nocase( str, query );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
found = strstr( str, query ) != nullptr;
|
|
|
|
}
|
|
|
|
if( found )
|
2018-03-04 15:52:45 +00:00
|
|
|
{
|
2019-08-15 15:42:26 +00:00
|
|
|
match.push_back( (int16_t)i );
|
2018-03-04 15:52:45 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for( auto& srcloc : m_data.sourceLocationPayload )
|
|
|
|
{
|
|
|
|
const auto str = GetString( srcloc->name.active ? srcloc->name : srcloc->function );
|
2018-12-18 15:52:29 +00:00
|
|
|
bool found = false;
|
|
|
|
if( ignoreCase )
|
|
|
|
{
|
|
|
|
found = strstr_nocase( str, query );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
found = strstr( str, query ) != nullptr;
|
|
|
|
}
|
|
|
|
if( found )
|
2018-03-04 15:52:45 +00:00
|
|
|
{
|
2019-11-02 15:37:27 +00:00
|
|
|
auto it = m_data.sourceLocationPayloadMap.find( (const SourceLocation*)srcloc );
|
2018-03-04 15:52:45 +00:00
|
|
|
assert( it != m_data.sourceLocationPayloadMap.end() );
|
2019-08-15 15:42:26 +00:00
|
|
|
match.push_back( -int16_t( it->second + 1 ) );
|
2018-03-04 15:52:45 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return match;
|
|
|
|
}
|
|
|
|
|
#ifndef TRACY_NO_STATISTICS
Worker::SourceLocationZones& Worker::GetZonesForSourceLocation( int16_t srcloc )
{
    assert( AreSourceLocationZonesReady() );
    static SourceLocationZones empty;
    auto it = m_data.sourceLocationZones.find( srcloc );
    return it != m_data.sourceLocationZones.end() ? it->second : empty;
}

const Worker::SourceLocationZones& Worker::GetZonesForSourceLocation( int16_t srcloc ) const
{
    assert( AreSourceLocationZonesReady() );
    static const SourceLocationZones empty;
    auto it = m_data.sourceLocationZones.find( srcloc );
    return it != m_data.sourceLocationZones.end() ? it->second : empty;
}

const SymbolStats* Worker::GetSymbolStats( uint64_t symAddr ) const
{
    assert( AreCallstackSamplesReady() );
    auto it = m_data.symbolStats.find( symAddr );
    if( it == m_data.symbolStats.end() )
    {
        return nullptr;
    }
    else
    {
        return &it->second;
    }
}

const unordered_flat_map<CallstackFrameId, uint32_t, Worker::CallstackFrameIdHash, Worker::CallstackFrameIdCompare>* Worker::GetSymbolInstructionPointers( uint64_t symAddr ) const
{
    assert( AreCallstackSamplesReady() );
    auto it = m_data.instructionPointersMap.find( symAddr );
    if( it == m_data.instructionPointersMap.end() )
    {
        return nullptr;
    }
    else
    {
        return &it->second;
    }
}
#endif

void Worker::Network()
{
    auto ShouldExit = [this] { return m_shutdown.load( std::memory_order_relaxed ); };
    auto lz4buf = std::make_unique<char[]>( LZ4Size );

    for(;;)
    {
        {
            std::unique_lock<std::mutex> lock( m_netWriteLock );
            m_netWriteCv.wait( lock, [this] { return m_netWriteCnt > 0 || m_shutdown.load( std::memory_order_relaxed ); } );
            if( m_shutdown.load( std::memory_order_relaxed ) ) goto close;
            m_netWriteCnt--;
        }

        auto buf = m_buffer + m_bufferOffset;
        lz4sz_t lz4sz;
        if( !m_sock.Read( &lz4sz, sizeof( lz4sz ), 10, ShouldExit ) ) goto close;
        if( !m_sock.Read( lz4buf.get(), lz4sz, 10, ShouldExit ) ) goto close;
        auto bb = m_bytes.load( std::memory_order_relaxed );
        m_bytes.store( bb + sizeof( lz4sz ) + lz4sz, std::memory_order_relaxed );

        auto sz = LZ4_decompress_safe_continue( (LZ4_streamDecode_t*)m_stream, lz4buf.get(), buf, lz4sz, TargetFrameSize );
        assert( sz >= 0 );
        bb = m_decBytes.load( std::memory_order_relaxed );
        m_decBytes.store( bb + sz, std::memory_order_relaxed );

        {
            std::lock_guard<std::mutex> lock( m_netReadLock );
            m_netRead.push_back( NetBuffer { m_bufferOffset, sz } );
            m_netReadCv.notify_one();
        }

        m_bufferOffset += sz;
        if( m_bufferOffset > TargetFrameSize * 2 ) m_bufferOffset = 0;
    }

close:
    std::lock_guard<std::mutex> lock( m_netReadLock );
    m_netRead.push_back( NetBuffer { -1 } );
    m_netReadCv.notify_one();
}
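// Network() runs on its own thread and only moves bytes: it waits until the processing
// thread signals free slots via m_netWriteCnt, reads one LZ4-compressed frame from the
// socket, decompresses it into m_buffer and hands the { offset, size } pair over through
// m_netRead. A NetBuffer with bufferOffset == -1 is the end-of-stream marker. Parsing
// happens in Exec() below; splitting the two across threads lets socket reads overlap with
// event processing.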
2018-02-13 13:57:47 +00:00
|
|
|
void Worker::Exec()
|
|
|
|
{
|
2019-10-28 21:53:06 +00:00
|
|
|
auto ShouldExit = [this] { return m_shutdown.load( std::memory_order_relaxed ); };
|
2018-02-13 13:57:47 +00:00
|
|
|
|
|
|
|
for(;;)
|
|
|
|
{
|
2019-10-28 22:32:51 +00:00
|
|
|
if( m_shutdown.load( std::memory_order_relaxed ) ) { m_netWriteCv.notify_one(); return; };
|
2019-09-21 13:43:01 +00:00
|
|
|
if( m_sock.Connect( m_addr.c_str(), m_port ) ) break;
|
2020-05-14 00:27:57 +00:00
|
|
|
std::this_thread::sleep_for( std::chrono::milliseconds( 10 ) );
|
2018-09-09 15:47:20 +00:00
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2018-09-09 15:47:20 +00:00
|
|
|
std::chrono::time_point<std::chrono::high_resolution_clock> t0;
|
|
|
|
|
2018-09-09 17:28:53 +00:00
|
|
|
m_sock.Send( HandshakeShibboleth, HandshakeShibbolethSize );
|
|
|
|
uint32_t protocolVersion = ProtocolVersion;
|
|
|
|
m_sock.Send( &protocolVersion, sizeof( protocolVersion ) );
|
|
|
|
HandshakeStatus handshake;
|
2019-02-12 00:41:09 +00:00
|
|
|
if( !m_sock.Read( &handshake, sizeof( handshake ), 10, ShouldExit ) )
|
|
|
|
{
|
|
|
|
m_handshake.store( HandshakeDropped, std::memory_order_relaxed );
|
|
|
|
goto close;
|
|
|
|
}
|
2018-09-09 17:28:53 +00:00
|
|
|
m_handshake.store( handshake, std::memory_order_relaxed );
|
|
|
|
switch( handshake )
|
|
|
|
{
|
|
|
|
case HandshakeWelcome:
|
|
|
|
break;
|
|
|
|
case HandshakeProtocolMismatch:
|
2018-09-09 17:42:06 +00:00
|
|
|
case HandshakeNotAvailable:
|
2018-09-09 17:28:53 +00:00
|
|
|
default:
|
|
|
|
goto close;
|
|
|
|
}
|
|
|
|
|
2018-09-09 15:47:20 +00:00
|
|
|
m_data.framesBase = m_data.frames.Retrieve( 0, [this] ( uint64_t name ) {
|
|
|
|
auto fd = m_slab.AllocInit<FrameData>();
|
|
|
|
fd->name = name;
|
|
|
|
fd->continuous = 1;
|
|
|
|
return fd;
|
|
|
|
}, [this] ( uint64_t name ) {
|
|
|
|
assert( name == 0 );
|
|
|
|
char tmp[6] = "Frame";
|
|
|
|
HandleFrameName( name, tmp, 5 );
|
|
|
|
} );
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2018-09-09 15:47:20 +00:00
|
|
|
{
|
|
|
|
WelcomeMessage welcome;
|
2019-02-12 10:07:12 +00:00
|
|
|
if( !m_sock.Read( &welcome, sizeof( welcome ), 10, ShouldExit ) )
|
|
|
|
{
|
|
|
|
m_handshake.store( HandshakeDropped, std::memory_order_relaxed );
|
|
|
|
goto close;
|
|
|
|
}
|
2018-09-09 15:47:20 +00:00
|
|
|
m_timerMul = welcome.timerMul;
|
2019-08-15 15:52:36 +00:00
|
|
|
m_data.baseTime = welcome.initBegin;
|
2022-06-21 23:30:01 +00:00
|
|
|
const auto initEnd = TscTime( welcome.initEnd );
|
2019-08-15 15:52:36 +00:00
|
|
|
m_data.framesBase->frames.push_back( FrameEvent{ 0, -1, -1 } );
|
2019-06-06 19:44:48 +00:00
|
|
|
m_data.framesBase->frames.push_back( FrameEvent{ initEnd, -1, -1 } );
|
2018-09-09 15:47:20 +00:00
|
|
|
m_data.lastTime = initEnd;
|
2022-06-21 23:30:01 +00:00
|
|
|
m_delay = TscPeriod( welcome.delay );
|
|
|
|
m_resolution = TscPeriod( welcome.resolution );
|
2019-08-17 20:19:04 +00:00
|
|
|
m_pid = welcome.pid;
|
2020-02-25 22:13:28 +00:00
|
|
|
m_samplingPeriod = welcome.samplingPeriod;
|
2021-06-14 23:26:50 +00:00
|
|
|
m_onDemand = welcome.flags & WelcomeFlag::OnDemand;
|
2018-09-09 15:47:20 +00:00
|
|
|
m_captureProgram = welcome.programName;
|
|
|
|
m_captureTime = welcome.epoch;
|
2021-01-31 16:51:16 +00:00
|
|
|
m_executableTime = welcome.exectime;
|
2021-06-14 23:26:50 +00:00
|
|
|
m_ignoreMemFreeFaults = ( welcome.flags & WelcomeFlag::OnDemand ) || ( welcome.flags & WelcomeFlag::IsApple );
|
2020-03-25 20:48:24 +00:00
|
|
|
m_data.cpuArch = (CpuArchitecture)welcome.cpuArch;
|
2021-06-14 23:26:50 +00:00
|
|
|
m_codeTransfer = welcome.flags & WelcomeFlag::CodeTransfer;
|
2021-06-14 23:33:43 +00:00
|
|
|
m_combineSamples = welcome.flags & WelcomeFlag::CombineSamples;
|
2021-12-04 14:16:17 +00:00
|
|
|
m_identifySamples = welcome.flags & WelcomeFlag::IdentifySamples;
|
2020-05-06 16:59:54 +00:00
|
|
|
m_data.cpuId = welcome.cpuId;
|
|
|
|
memcpy( m_data.cpuManufacturer, welcome.cpuManufacturer, 12 );
|
|
|
|
m_data.cpuManufacturer[12] = '\0';
|
2018-09-09 15:47:20 +00:00
|
|
|
|
|
|
|
char dtmp[64];
|
|
|
|
time_t date = welcome.epoch;
|
|
|
|
auto lt = localtime( &date );
|
|
|
|
strftime( dtmp, 64, "%F %T", lt );
|
|
|
|
char tmp[1024];
|
|
|
|
sprintf( tmp, "%s @ %s", welcome.programName, dtmp );
|
|
|
|
m_captureName = tmp;
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2018-09-09 15:47:20 +00:00
|
|
|
m_hostInfo = welcome.hostInfo;
|
2018-08-04 17:47:09 +00:00
|
|
|
|
2021-06-14 23:26:50 +00:00
|
|
|
if( m_onDemand )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-09-09 15:47:20 +00:00
|
|
|
OnDemandPayloadMessage onDemand;
|
2019-02-12 10:07:12 +00:00
|
|
|
if( !m_sock.Read( &onDemand, sizeof( onDemand ), 10, ShouldExit ) )
|
|
|
|
{
|
|
|
|
m_handshake.store( HandshakeDropped, std::memory_order_relaxed );
|
|
|
|
goto close;
|
|
|
|
}
|
2018-09-09 15:47:20 +00:00
|
|
|
m_data.frameOffset = onDemand.frames;
|
2022-06-21 23:30:01 +00:00
|
|
|
m_data.framesBase->frames.push_back( FrameEvent{ TscTime( onDemand.currentTime ), -1, -1 } );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
2018-09-09 15:47:20 +00:00
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2021-11-14 17:13:34 +00:00
|
|
|
m_serverQuerySpaceBase = m_serverQuerySpaceLeft = std::min( ( m_sock.GetSendBufSize() / ServerQueryPacketSize ), 8*1024 ) - 4; // leave space for terminate request
|
2018-09-09 15:47:20 +00:00
|
|
|
m_hasData.store( true, std::memory_order_release );
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2019-11-06 00:25:38 +00:00
|
|
|
LZ4_setStreamDecode( (LZ4_streamDecode_t*)m_stream, nullptr, 0 );
|
2018-09-09 15:47:20 +00:00
|
|
|
m_connected.store( true, std::memory_order_relaxed );
|
2019-10-28 22:22:50 +00:00
|
|
|
{
|
|
|
|
std::lock_guard<std::mutex> lock( m_netWriteLock );
|
|
|
|
m_netWriteCnt = 2;
|
|
|
|
m_netWriteCv.notify_one();
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2018-09-09 15:47:20 +00:00
|
|
|
t0 = std::chrono::high_resolution_clock::now();
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2018-09-09 15:47:20 +00:00
|
|
|
for(;;)
|
|
|
|
{
|
2019-06-09 14:06:34 +00:00
|
|
|
if( m_shutdown.load( std::memory_order_relaxed ) )
|
|
|
|
{
|
|
|
|
QueryTerminate();
|
2019-10-28 21:52:52 +00:00
|
|
|
goto close;
|
2019-06-09 14:06:34 +00:00
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2019-10-28 22:22:50 +00:00
|
|
|
NetBuffer netbuf;
|
|
|
|
{
|
|
|
|
std::unique_lock<std::mutex> lock( m_netReadLock );
|
|
|
|
m_netReadCv.wait( lock, [this] { return !m_netRead.empty(); } );
|
|
|
|
netbuf = m_netRead.front();
|
|
|
|
m_netRead.erase( m_netRead.begin() );
|
|
|
|
}
|
|
|
|
if( netbuf.bufferOffset < 0 ) goto close;
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2019-11-07 00:29:11 +00:00
|
|
|
const char* ptr = m_buffer + netbuf.bufferOffset;
|
2019-10-28 22:22:50 +00:00
|
|
|
const char* end = ptr + netbuf.size;
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2018-09-09 15:47:20 +00:00
|
|
|
{
|
2021-02-07 17:11:24 +00:00
|
|
|
std::lock_guard<std::mutex> lock( m_data.lock );
|
2018-09-09 15:47:20 +00:00
|
|
|
while( ptr < end )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-09-09 15:47:20 +00:00
|
|
|
auto ev = (const QueueItem*)ptr;
|
2019-06-09 14:06:34 +00:00
|
|
|
if( !DispatchProcess( *ev, ptr ) )
|
|
|
|
{
|
2020-03-29 21:01:57 +00:00
|
|
|
if( m_failure != Failure::None ) HandleFailure( ptr, end );
|
2019-06-09 14:06:34 +00:00
|
|
|
QueryTerminate();
|
|
|
|
goto close;
|
|
|
|
}
|
2018-09-09 15:47:20 +00:00
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2019-10-28 22:22:50 +00:00
|
|
|
{
|
|
|
|
std::lock_guard<std::mutex> lock( m_netWriteLock );
|
|
|
|
m_netWriteCnt++;
|
|
|
|
m_netWriteCv.notify_one();
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2022-03-30 22:04:32 +00:00
|
|
|
if( m_serverQuerySpaceLeft > 0 && !m_serverQueryQueuePrio.empty() )
|
|
|
|
{
|
|
|
|
const auto toSend = std::min( m_serverQuerySpaceLeft, m_serverQueryQueuePrio.size() );
|
|
|
|
m_sock.Send( m_serverQueryQueuePrio.data(), toSend * ServerQueryPacketSize );
|
|
|
|
m_serverQuerySpaceLeft -= toSend;
|
|
|
|
if( toSend == m_serverQueryQueuePrio.size() )
|
|
|
|
{
|
|
|
|
m_serverQueryQueuePrio.clear();
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
m_serverQueryQueuePrio.erase( m_serverQueryQueuePrio.begin(), m_serverQueryQueuePrio.begin() + toSend );
|
|
|
|
}
|
|
|
|
}
|
2022-03-30 22:03:02 +00:00
|
|
|
if( m_serverQuerySpaceLeft > 0 && !m_serverQueryQueue.empty() )
|
2019-04-01 17:47:29 +00:00
|
|
|
{
|
2020-04-01 19:05:25 +00:00
|
|
|
const auto toSend = std::min( m_serverQuerySpaceLeft, m_serverQueryQueue.size() );
|
|
|
|
m_sock.Send( m_serverQueryQueue.data(), toSend * ServerQueryPacketSize );
|
|
|
|
m_serverQuerySpaceLeft -= toSend;
|
|
|
|
if( toSend == m_serverQueryQueue.size() )
|
|
|
|
{
|
|
|
|
m_serverQueryQueue.clear();
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
m_serverQueryQueue.erase( m_serverQueryQueue.begin(), m_serverQueryQueue.begin() + toSend );
|
|
|
|
}
|
2019-04-01 17:47:29 +00:00
|
|
|
}
|
2018-09-09 15:47:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
auto t1 = std::chrono::high_resolution_clock::now();
|
|
|
|
auto td = std::chrono::duration_cast<std::chrono::milliseconds>( t1 - t0 ).count();
|
|
|
|
enum { MbpsUpdateTime = 200 };
|
|
|
|
if( td > MbpsUpdateTime )
|
|
|
|
{
|
2020-04-29 00:36:38 +00:00
|
|
|
UpdateMbps( td );
|
2018-09-09 15:47:20 +00:00
|
|
|
t0 = t1;
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2018-09-09 15:47:20 +00:00
|
|
|
if( m_terminate )
|
|
|
|
{
|
|
|
|
if( m_pendingStrings != 0 || m_pendingThreads != 0 || m_pendingSourceLocation != 0 || m_pendingCallstackFrames != 0 ||
|
2020-09-29 15:04:41 +00:00
|
|
|
m_data.plots.IsPending() || m_pendingCallstackId != 0 || m_pendingExternalNames != 0 ||
|
2020-07-25 23:35:08 +00:00
|
|
|
m_pendingCallstackSubframes != 0 || m_pendingFrameImageData.image != nullptr || !m_pendingSymbols.empty() ||
|
2022-03-30 22:04:32 +00:00
|
|
|
m_pendingSymbolCode != 0 || m_pendingCodeInformation != 0 || !m_serverQueryQueue.empty() || !m_serverQueryQueuePrio.empty() ||
|
2021-02-03 23:03:25 +00:00
|
|
|
m_pendingSourceLocationPayload != 0 || m_pendingSingleString.ptr != nullptr || m_pendingSecondString.ptr != nullptr ||
|
2021-11-03 17:57:30 +00:00
|
|
|
!m_sourceCodeQuery.empty() || m_pendingFibers != 0 )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-09-09 15:47:20 +00:00
|
|
|
continue;
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
2019-08-01 21:14:09 +00:00
|
|
|
if( !m_crashed && !m_disconnect )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-09-09 15:47:20 +00:00
|
|
|
bool done = true;
|
|
|
|
for( auto& v : m_data.threads )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-09-09 15:47:20 +00:00
|
|
|
if( !v->stack.empty() )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-09-09 15:47:20 +00:00
|
|
|
done = false;
|
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
}
|
2018-09-09 15:47:20 +00:00
|
|
|
if( !done ) continue;
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
2020-03-27 01:02:36 +00:00
|
|
|
QueryTerminate();
|
2020-04-29 00:36:38 +00:00
|
|
|
UpdateMbps( 0 );
|
2018-09-09 15:47:20 +00:00
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
2018-09-09 15:47:20 +00:00
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
|
|
|
close:
|
2019-10-28 22:22:50 +00:00
|
|
|
Shutdown();
|
|
|
|
m_netWriteCv.notify_one();
|
2018-09-09 15:47:20 +00:00
|
|
|
m_sock.Close();
|
|
|
|
m_connected.store( false, std::memory_order_relaxed );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
void Worker::UpdateMbps( int64_t td )
{
    const auto bytes = m_bytes.exchange( 0, std::memory_order_relaxed );
    const auto decBytes = m_decBytes.exchange( 0, std::memory_order_relaxed );
    std::lock_guard<std::shared_mutex> lock( m_mbpsData.lock );
    if( td != 0 )
    {
        m_mbpsData.mbps.erase( m_mbpsData.mbps.begin() );
        m_mbpsData.mbps.emplace_back( bytes / ( td * 125.f ) );
    }
    m_mbpsData.compRatio = decBytes == 0 ? 1 : float( bytes ) / decBytes;
    m_mbpsData.queue = m_serverQueryQueue.size() + m_serverQueryQueuePrio.size();
    m_mbpsData.transferred += bytes;
}
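// The bandwidth sample above converts bytes received in td milliseconds into megabits per
// second: mbps = bytes * 8 / ( td / 1000 ) / 1e6 = bytes / ( td * 125 ), hence the 125.f
// divisor. For example, 250000 bytes in a 200 ms window is 250000 / ( 200 * 125 ) = 10 Mbps.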

bool Worker::IsThreadStringRetrieved( uint64_t id )
{
    const auto name = GetThreadName( m_failureData.thread );
    return strcmp( name, "???" ) != 0;
}

bool Worker::IsCallstackRetrieved( uint32_t callstack )
{
    auto& cs = GetCallstack( callstack );
    for( auto& v : cs )
    {
        auto frameData = GetCallstackFrame( v );
        if( !frameData ) return false;
    }
    return true;
}

bool Worker::IsSourceLocationRetrieved( int16_t srcloc )
{
    auto& sl = GetSourceLocation( srcloc );
    auto func = GetString( sl.function );
    auto file = GetString( sl.file );
    return strcmp( func, "???" ) != 0 && strcmp( file, "???" ) != 0;
}

bool Worker::HasAllFailureData()
{
    if( m_failureData.thread != 0 && !IsThreadStringRetrieved( m_failureData.thread ) ) return false;
    if( m_failureData.srcloc != 0 && !IsSourceLocationRetrieved( m_failureData.srcloc ) ) return false;
    if( m_failureData.callstack != 0 && !IsCallstackRetrieved( m_failureData.callstack ) ) return false;
    return true;
}

void Worker::HandleFailure( const char* ptr, const char* end )
|
|
|
|
{
|
|
|
|
if( HasAllFailureData() ) return;
|
|
|
|
for(;;)
|
|
|
|
{
|
|
|
|
while( ptr < end )
|
|
|
|
{
|
|
|
|
auto ev = (const QueueItem*)ptr;
|
|
|
|
DispatchFailure( *ev, ptr );
|
|
|
|
}
|
|
|
|
if( HasAllFailureData() ) return;
|
|
|
|
|
|
|
|
{
|
|
|
|
std::lock_guard<std::mutex> lock( m_netWriteLock );
|
|
|
|
m_netWriteCnt++;
|
|
|
|
m_netWriteCv.notify_one();
|
|
|
|
}
|
|
|
|
|
2022-03-30 22:04:32 +00:00
|
|
|
if( m_serverQuerySpaceLeft > 0 && !m_serverQueryQueuePrio.empty() )
|
|
|
|
{
|
|
|
|
const auto toSend = std::min( m_serverQuerySpaceLeft, m_serverQueryQueuePrio.size() );
|
|
|
|
m_sock.Send( m_serverQueryQueuePrio.data(), toSend * ServerQueryPacketSize );
|
|
|
|
m_serverQuerySpaceLeft -= toSend;
|
|
|
|
if( toSend == m_serverQueryQueuePrio.size() )
|
|
|
|
{
|
|
|
|
m_serverQueryQueuePrio.clear();
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
m_serverQueryQueuePrio.erase( m_serverQueryQueuePrio.begin(), m_serverQueryQueuePrio.begin() + toSend );
|
|
|
|
}
|
|
|
|
}
|
2022-03-30 22:03:02 +00:00
|
|
|
if( m_serverQuerySpaceLeft > 0 && !m_serverQueryQueue.empty() )
|
2020-03-29 21:01:57 +00:00
|
|
|
{
|
2020-04-02 10:15:50 +00:00
|
|
|
const auto toSend = std::min( m_serverQuerySpaceLeft, m_serverQueryQueue.size() );
|
|
|
|
m_sock.Send( m_serverQueryQueue.data(), toSend * ServerQueryPacketSize );
|
|
|
|
m_serverQuerySpaceLeft -= toSend;
|
|
|
|
if( toSend == m_serverQueryQueue.size() )
|
|
|
|
{
|
|
|
|
m_serverQueryQueue.clear();
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
m_serverQueryQueue.erase( m_serverQueryQueue.begin(), m_serverQueryQueue.begin() + toSend );
|
|
|
|
}
|
2020-03-29 21:01:57 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if( m_shutdown.load( std::memory_order_relaxed ) ) return;
|
|
|
|
|
|
|
|
NetBuffer netbuf;
|
|
|
|
{
|
|
|
|
std::unique_lock<std::mutex> lock( m_netReadLock );
|
|
|
|
m_netReadCv.wait( lock, [this] { return !m_netRead.empty(); } );
|
|
|
|
netbuf = m_netRead.front();
|
|
|
|
m_netRead.erase( m_netRead.begin() );
|
|
|
|
}
|
|
|
|
if( netbuf.bufferOffset < 0 ) return;
|
|
|
|
|
|
|
|
ptr = m_buffer + netbuf.bufferOffset;
|
|
|
|
end = ptr + netbuf.size;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void Worker::DispatchFailure( const QueueItem& ev, const char*& ptr )
|
|
|
|
{
|
|
|
|
if( ev.hdr.idx >= (int)QueueType::StringData )
|
|
|
|
{
|
|
|
|
ptr += sizeof( QueueHeader ) + sizeof( QueueStringTransfer );
|
|
|
|
if( ev.hdr.type == QueueType::FrameImageData ||
|
2021-02-03 23:03:25 +00:00
|
|
|
ev.hdr.type == QueueType::SymbolCode ||
|
|
|
|
ev.hdr.type == QueueType::SourceCode )
|
2020-03-29 21:01:57 +00:00
|
|
|
{
|
2021-02-03 23:03:25 +00:00
|
|
|
if( ev.hdr.type == QueueType::SymbolCode || ev.hdr.type == QueueType::SourceCode )
|
|
|
|
{
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
}
|
2020-03-29 21:01:57 +00:00
|
|
|
uint32_t sz;
|
|
|
|
memcpy( &sz, ptr, sizeof( sz ) );
|
|
|
|
ptr += sizeof( sz ) + sz;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
uint16_t sz;
|
|
|
|
memcpy( &sz, ptr, sizeof( sz ) );
|
|
|
|
ptr += sizeof( sz );
|
|
|
|
switch( ev.hdr.type )
|
|
|
|
{
|
|
|
|
case QueueType::StringData:
|
|
|
|
AddString( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
|
|
|
case QueueType::ThreadName:
|
|
|
|
AddThreadString( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
2021-11-03 17:57:30 +00:00
|
|
|
case QueueType::FiberName:
|
|
|
|
AddFiberName( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
2020-03-29 21:01:57 +00:00
|
|
|
case QueueType::PlotName:
|
|
|
|
case QueueType::FrameName:
|
|
|
|
case QueueType::ExternalName:
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
ptr += sz;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2020-09-30 13:46:00 +00:00
|
|
|
uint16_t sz;
|
2020-03-29 21:01:57 +00:00
|
|
|
switch( ev.hdr.type )
|
|
|
|
{
|
2020-09-30 13:46:00 +00:00
|
|
|
case QueueType::SingleStringData:
|
2020-09-30 13:57:14 +00:00
|
|
|
ptr += sizeof( QueueHeader );
|
|
|
|
memcpy( &sz, ptr, sizeof( sz ) );
|
|
|
|
ptr += sizeof( sz );
|
|
|
|
AddSingleStringFailure( ptr, sz );
|
|
|
|
ptr += sz;
|
|
|
|
break;
|
2020-09-30 13:46:00 +00:00
|
|
|
case QueueType::SecondStringData:
|
|
|
|
ptr += sizeof( QueueHeader );
|
|
|
|
memcpy( &sz, ptr, sizeof( sz ) );
|
2020-09-30 13:57:14 +00:00
|
|
|
ptr += sizeof( sz );
|
|
|
|
AddSecondString( ptr, sz );
|
|
|
|
ptr += sz;
|
2020-03-29 21:01:57 +00:00
|
|
|
break;
|
|
|
|
default:
|
2020-09-30 13:46:00 +00:00
|
|
|
ptr += QueueDataSize[ev.hdr.idx];
|
|
|
|
switch( ev.hdr.type )
|
|
|
|
{
|
|
|
|
case QueueType::SourceLocation:
|
|
|
|
AddSourceLocation( ev.srcloc );
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
|
|
|
case QueueType::CallstackFrameSize:
|
2020-09-30 13:57:14 +00:00
|
|
|
ProcessCallstackFrameSize( ev.callstackFrameSize );
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
|
|
|
case QueueType::CallstackFrame:
|
|
|
|
ProcessCallstackFrame( ev.callstackFrame, false );
|
|
|
|
break;
|
2020-09-30 13:46:00 +00:00
|
|
|
case QueueType::SymbolInformation:
|
2020-09-30 14:48:59 +00:00
|
|
|
case QueueType::CodeInformation:
|
2021-02-03 20:47:03 +00:00
|
|
|
case QueueType::AckServerQueryNoop:
|
2021-02-03 23:03:25 +00:00
|
|
|
case QueueType::AckSourceCodeNotAvailable:
|
2021-11-25 21:44:56 +00:00
|
|
|
case QueueType::AckSymbolCodeNotAvailable:
|
2020-09-30 13:46:00 +00:00
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
break;
|
|
|
|
}
|
2020-03-29 21:01:57 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
void Worker::Query( ServerQuery type, uint64_t data, uint32_t extra )
{
    ServerQueryPacket query { type, data, extra };
    if( m_serverQuerySpaceLeft > 0 && m_serverQueryQueuePrio.empty() && m_serverQueryQueue.empty() )
    {
        m_serverQuerySpaceLeft--;
        m_sock.Send( &query, ServerQueryPacketSize );
    }
    else if( IsQueryPrio( type ) )
    {
        m_serverQueryQueuePrio.push_back( query );
    }
    else
    {
        m_serverQueryQueue.push_back( query );
    }
}
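// Query() implements simple backpressure: m_serverQuerySpaceLeft mirrors how many packets
// fit into the socket send buffer, so a query goes out immediately only when there is space
// and nothing is already queued. Otherwise it is parked in m_serverQueryQueuePrio (for
// query types IsQueryPrio() treats as latency-sensitive) or m_serverQueryQueue, and both
// queues are drained from the Exec() loop as acknowledgements free up space.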

void Worker::QueryTerminate()
{
    ServerQueryPacket query { ServerQueryTerminate, 0, 0 };
    m_sock.Send( &query, ServerQueryPacketSize );
}

void Worker::QuerySourceFile( const char* fn, const char* image )
{
    if( image ) QueryDataTransfer( image, strlen( image ) + 1 );
    QueryDataTransfer( fn, strlen( fn ) + 1 );
    Query( ServerQuerySourceCode, 0 );
}

void Worker::QueryDataTransfer( const void* ptr, size_t size )
{
    Query( ServerQueryDataTransfer, size );
    auto data = (const char*)ptr;
    while( size > 0 )
    {
        uint64_t d8;
        uint32_t d4;
        if( size >= 12 )
        {
            memcpy( &d8, data, 8 );
            memcpy( &d4, data+8, 4 );
            data += 12;
            size -= 12;
        }
        else if( size > 8 )
        {
            memcpy( &d8, data, 8 );
            memset( &d4, 0, 4 );
            memcpy( &d4, data+8, size-8 );
            size = 0;
        }
        else
        {
            memset( &d8, 0, 8 );
            memset( &d4, 0, 4 );
            memcpy( &d8, data, size );
            size = 0;
        }
        Query( ServerQueryDataTransferPart, d8, d4 );
    }
}
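// QueryDataTransfer() streams an arbitrary blob through fixed-size query packets: an
// initial ServerQueryDataTransfer announces the total size, then each
// ServerQueryDataTransferPart carries up to 12 bytes (8 in the data field, 4 in the extra
// field), zero-padded at the tail. For example, a 20-byte file name goes out as the size
// announcement followed by two part packets: the first carrying 12 bytes, the second the
// remaining 8.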
2019-11-07 00:29:11 +00:00
|
|
|
bool Worker::DispatchProcess( const QueueItem& ev, const char*& ptr )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-06-19 17:15:19 +00:00
|
|
|
if( ev.hdr.idx >= (int)QueueType::StringData )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
ptr += sizeof( QueueHeader ) + sizeof( QueueStringTransfer );
|
2020-03-25 19:04:55 +00:00
|
|
|
if( ev.hdr.type == QueueType::FrameImageData ||
|
2021-02-03 23:03:25 +00:00
|
|
|
ev.hdr.type == QueueType::SymbolCode ||
|
|
|
|
ev.hdr.type == QueueType::SourceCode )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2019-06-06 19:39:54 +00:00
|
|
|
uint32_t sz;
|
|
|
|
memcpy( &sz, ptr, sizeof( sz ) );
|
|
|
|
ptr += sizeof( sz );
|
2020-03-25 19:04:55 +00:00
|
|
|
switch( ev.hdr.type )
|
|
|
|
{
|
|
|
|
case QueueType::FrameImageData:
|
|
|
|
AddFrameImageData( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
break;
|
|
|
|
case QueueType::SymbolCode:
|
|
|
|
AddSymbolCode( ev.stringTransfer.ptr, ptr, sz );
|
2020-03-25 19:33:50 +00:00
|
|
|
m_serverQuerySpaceLeft++;
|
2020-03-25 19:04:55 +00:00
|
|
|
break;
|
2021-02-03 23:03:25 +00:00
|
|
|
case QueueType::SourceCode:
|
|
|
|
AddSourceCode( ptr, sz );
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
2020-03-25 19:04:55 +00:00
|
|
|
default:
|
|
|
|
assert( false );
|
|
|
|
break;
|
|
|
|
}
|
2019-06-06 19:39:54 +00:00
|
|
|
ptr += sz;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
uint16_t sz;
|
|
|
|
memcpy( &sz, ptr, sizeof( sz ) );
|
|
|
|
ptr += sizeof( sz );
|
|
|
|
switch( ev.hdr.type )
|
|
|
|
{
|
|
|
|
case QueueType::StringData:
|
|
|
|
AddString( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
|
|
|
case QueueType::ThreadName:
|
|
|
|
AddThreadString( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
2021-11-03 17:57:30 +00:00
|
|
|
case QueueType::FiberName:
|
|
|
|
AddFiberName( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
2019-06-06 19:39:54 +00:00
|
|
|
case QueueType::PlotName:
|
|
|
|
HandlePlotName( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
|
|
|
case QueueType::SourceLocationPayload:
|
|
|
|
AddSourceLocationPayload( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
break;
|
|
|
|
case QueueType::CallstackPayload:
|
|
|
|
AddCallstackPayload( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
break;
|
|
|
|
case QueueType::FrameName:
|
|
|
|
HandleFrameName( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
|
|
|
case QueueType::CallstackAllocPayload:
|
|
|
|
AddCallstackAllocPayload( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
break;
|
2019-08-16 17:22:23 +00:00
|
|
|
case QueueType::ExternalName:
|
|
|
|
AddExternalName( ev.stringTransfer.ptr, ptr, sz );
|
2020-03-25 19:33:50 +00:00
|
|
|
m_serverQuerySpaceLeft++;
|
2019-08-16 17:22:23 +00:00
|
|
|
break;
|
2019-08-16 17:49:16 +00:00
|
|
|
case QueueType::ExternalThreadName:
|
|
|
|
AddExternalThreadName( ev.stringTransfer.ptr, ptr, sz );
|
|
|
|
break;
|
2019-06-06 19:39:54 +00:00
|
|
|
default:
|
|
|
|
assert( false );
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
ptr += sz;
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
2019-01-14 22:08:34 +00:00
|
|
|
return true;
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2020-07-25 23:32:49 +00:00
|
|
|
uint16_t sz;
|
|
|
|
switch( ev.hdr.type )
|
|
|
|
{
|
|
|
|
case QueueType::SingleStringData:
|
|
|
|
ptr += sizeof( QueueHeader );
|
|
|
|
memcpy( &sz, ptr, sizeof( sz ) );
|
|
|
|
ptr += sizeof( sz );
|
|
|
|
AddSingleString( ptr, sz );
|
|
|
|
ptr += sz;
|
|
|
|
return true;
|
|
|
|
case QueueType::SecondStringData:
|
|
|
|
ptr += sizeof( QueueHeader );
|
|
|
|
memcpy( &sz, ptr, sizeof( sz ) );
|
|
|
|
ptr += sizeof( sz );
|
|
|
|
AddSecondString( ptr, sz );
|
|
|
|
ptr += sz;
|
|
|
|
return true;
|
|
|
|
default:
|
|
|
|
ptr += QueueDataSize[ev.hdr.idx];
|
|
|
|
return Process( ev );
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
}

void Worker::CheckSourceLocation( uint64_t ptr )
{
    if( m_data.checkSrclocLast != ptr )
    {
        m_data.checkSrclocLast = ptr;
        if( m_data.sourceLocation.find( ptr ) == m_data.sourceLocation.end() )
        {
            NewSourceLocation( ptr );
        }
    }
}

void Worker::NewSourceLocation( uint64_t ptr )
{
    static const SourceLocation emptySourceLocation = {};

    m_data.sourceLocation.emplace( ptr, emptySourceLocation );
    m_pendingSourceLocation++;
    m_sourceLocationQueue.push_back( ptr );

    Query( ServerQuerySourceLocation, ptr );
}

int16_t Worker::ShrinkSourceLocationReal( uint64_t srcloc )
{
    auto it = m_sourceLocationShrink.find( srcloc );
    if( it != m_sourceLocationShrink.end() )
    {
        m_data.shrinkSrclocLast.first = srcloc;
        m_data.shrinkSrclocLast.second = it->second;
        return it->second;
    }
    else
    {
        return NewShrinkedSourceLocation( srcloc );
    }
}

int16_t Worker::NewShrinkedSourceLocation( uint64_t srcloc )
{
    assert( m_data.sourceLocationExpand.size() < std::numeric_limits<int16_t>::max() );
    const auto sz = int16_t( m_data.sourceLocationExpand.size() );
    m_data.sourceLocationExpand.push_back( srcloc );
#ifndef TRACY_NO_STATISTICS
    auto res = m_data.sourceLocationZones.emplace( sz, SourceLocationZones() );
    m_data.srclocZonesLast.first = sz;
    m_data.srclocZonesLast.second = &res.first->second;
#else
    auto res = m_data.sourceLocationZonesCnt.emplace( sz, 0 );
    m_data.srclocCntLast.first = sz;
    m_data.srclocCntLast.second = &res.first->second;
#endif
    m_sourceLocationShrink.emplace( srcloc, sz );
    m_data.shrinkSrclocLast.first = srcloc;
    m_data.shrinkSrclocLast.second = sz;
    return sz;
}
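// Source location "shrinking" maps the client's 64-bit source location pointers to dense
// 16-bit ids so they fit into zone events: sourceLocationExpand gives id -> pointer,
// m_sourceLocationShrink gives pointer -> id, and shrinkSrclocLast / srclocZonesLast act
// as one-entry caches for the common case of repeated lookups of the same location.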

void Worker::InsertMessageData( MessageData* msg )
{
    if( m_data.messages.empty() )
    {
        m_data.messages.push_back( msg );
    }
    else if( m_data.messages.back()->time < msg->time )
    {
        m_data.messages.push_back_non_empty( msg );
    }
    else
    {
        auto mit = std::lower_bound( m_data.messages.begin(), m_data.messages.end(), msg->time, [] ( const auto& lhs, const auto& rhs ) { return lhs->time < rhs; } );
        m_data.messages.insert( mit, msg );
    }

    auto td = GetCurrentThreadData();
    auto vec = &td->messages;
    if( vec->empty() )
    {
        vec->push_back( msg );
    }
    else if( vec->back()->time < msg->time )
    {
        vec->push_back_non_empty( msg );
    }
    else
    {
        auto tmit = std::lower_bound( vec->begin(), vec->end(), msg->time, [] ( const auto& lhs, const auto& rhs ) { return lhs->time < rhs; } );
        vec->insert( tmit, msg );
    }
}

ThreadData* Worker::NoticeThreadReal( uint64_t thread )
{
    auto it = m_threadMap.find( thread );
    if( it != m_threadMap.end() )
    {
        m_data.threadDataLast.first = thread;
        m_data.threadDataLast.second = it->second;
        return it->second;
    }
    else
    {
        CheckThreadString( thread );
        return NewThread( thread, false );
    }
}

ThreadData* Worker::RetrieveThreadReal( uint64_t thread )
{
    auto it = m_threadMap.find( thread );
    if( it != m_threadMap.end() )
    {
        m_data.threadDataLast.first = thread;
        m_data.threadDataLast.second = it->second;
        return it->second;
    }
    else
    {
        return nullptr;
    }
}

ThreadData* Worker::GetCurrentThreadData()
{
    auto td = m_threadCtxData;
    if( !td ) td = m_threadCtxData = NoticeThread( m_threadCtx );
    if( td->fiber ) td = td->fiber;
    return td;
}
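// GetCurrentThreadData() resolves the thread the packet currently being processed belongs
// to (m_threadCtx), creating its ThreadData on first sight, and then redirects to the
// fiber's ThreadData when a fiber is active on that thread, so fiber zones and messages
// are attributed to the fiber rather than to the underlying OS thread.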
2019-10-25 19:29:55 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
Worker::SourceLocationZones* Worker::GetSourceLocationZonesReal( uint16_t srcloc )
|
|
|
|
{
|
|
|
|
auto it = m_data.sourceLocationZones.find( srcloc );
|
|
|
|
assert( it != m_data.sourceLocationZones.end() );
|
|
|
|
m_data.srclocZonesLast.first = srcloc;
|
|
|
|
m_data.srclocZonesLast.second = &it->second;
|
|
|
|
return &it->second;
|
|
|
|
}
|
2022-01-23 14:46:30 +00:00
|
|
|
|
|
|
|
Worker::GpuSourceLocationZones* Worker::GetGpuSourceLocationZonesReal( uint16_t srcloc )
|
|
|
|
{
|
|
|
|
auto it = m_data.gpuSourceLocationZones.find( srcloc );
|
2022-01-29 14:45:25 +00:00
|
|
|
if( it == m_data.gpuSourceLocationZones.end() )
|
|
|
|
{
|
|
|
|
it = m_data.gpuSourceLocationZones.emplace( srcloc, GpuSourceLocationZones() ).first;
|
|
|
|
}
|
2022-01-23 14:46:30 +00:00
|
|
|
m_data.gpuZonesLast.first = srcloc;
|
|
|
|
m_data.gpuZonesLast.second = &it->second;
|
|
|
|
return &it->second;
|
|
|
|
}
|
2019-10-26 14:30:13 +00:00
|
|
|
#else
|
|
|
|
uint64_t* Worker::GetSourceLocationZonesCntReal( uint16_t srcloc )
|
|
|
|
{
|
|
|
|
auto it = m_data.sourceLocationZonesCnt.find( srcloc );
|
|
|
|
assert( it != m_data.sourceLocationZonesCnt.end() );
|
|
|
|
m_data.srclocCntLast.first = srcloc;
|
|
|
|
m_data.srclocCntLast.second = &it->second;
|
|
|
|
return &it->second;
|
|
|
|
}
|
2022-01-23 14:46:30 +00:00
|
|
|
|
|
|
|
uint64_t* Worker::GetGpuSourceLocationZonesCntReal( uint16_t srcloc )
|
|
|
|
{
|
|
|
|
auto it = m_data.gpuSourceLocationZonesCnt.find( srcloc );
|
|
|
|
assert( it != m_data.gpuSourceLocationZonesCnt.end() );
|
|
|
|
m_data.gpuCntLast.first = srcloc;
|
|
|
|
m_data.gpuCntLast.second = &it->second;
|
|
|
|
return &it->second;
|
|
|
|
}
|
2019-10-25 19:29:55 +00:00
|
|
|
#endif
|
|
|
|
|
const ThreadData* Worker::GetThreadData( uint64_t tid ) const
{
    auto it = m_threadMap.find( tid );
    if( it == m_threadMap.end() ) return nullptr;
    return it->second;
}

const MemData& Worker::GetMemoryNamed( uint64_t name ) const
{
    auto it = m_data.memNameMap.find( name );
    assert( it != m_data.memNameMap.end() );
    return *it->second;
}

ThreadData* Worker::NewThread( uint64_t thread, bool fiber )
{
    auto td = m_slab.AllocInit<ThreadData>();
    td->id = thread;
    td->count = 0;
    td->nextZoneId = 0;
#ifndef TRACY_NO_STATISTICS
    td->ghostIdx = 0;
#endif
    td->kernelSampleCnt = 0;
    td->pendingSample.time.Clear();
    td->isFiber = fiber;
    td->fiber = nullptr;
    td->stackCount = (uint8_t*)m_slab.AllocBig( sizeof( uint8_t ) * 64*1024 );
    memset( td->stackCount, 0, sizeof( uint8_t ) * 64*1024 );
    m_data.threads.push_back( td );
    m_threadMap.emplace( thread, td );
    m_data.threadDataLast.first = thread;
    m_data.threadDataLast.second = td;
    return td;
}
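// Note: the 64 KB stackCount array allocated in NewThread() appears to hold one 8-bit
// nesting counter per possible 16-bit shrunk source location id (it is updated through
// IncStackCount() in NewZone() below), which is why it is sized to 64*1024 up front
// regardless of how many locations are actually in use.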
2021-11-06 17:55:29 +00:00
|
|
|
void Worker::NewZone( ZoneEvent* zone )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
m_data.zonesCnt++;
|
2018-03-18 01:05:33 +00:00
|
|
|
|
2021-11-06 17:55:29 +00:00
|
|
|
auto td = GetCurrentThreadData();
|
2018-02-13 13:57:47 +00:00
|
|
|
td->count++;
|
2021-06-05 18:13:57 +00:00
|
|
|
td->IncStackCount( zone->SrcLoc() );
|
2020-02-23 14:35:08 +00:00
|
|
|
const auto ssz = td->stack.size();
|
|
|
|
if( ssz == 0 )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
td->stack.push_back( zone );
|
|
|
|
td->timeline.push_back( zone );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2020-02-23 14:35:08 +00:00
|
|
|
auto& back = td->stack.data()[ssz-1];
|
2020-01-24 01:17:38 +00:00
|
|
|
if( !back->HasChildren() )
|
2018-07-22 14:05:50 +00:00
|
|
|
{
|
2019-09-30 23:05:37 +00:00
|
|
|
back->SetChild( int32_t( m_data.zoneChildren.size() ) );
|
2019-03-26 21:06:00 +00:00
|
|
|
if( m_data.zoneVectorCache.empty() )
|
|
|
|
{
|
2019-11-02 15:17:20 +00:00
|
|
|
m_data.zoneChildren.push_back( Vector<short_ptr<ZoneEvent>>( zone ) );
|
2019-03-26 21:06:00 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2019-11-02 15:17:20 +00:00
|
|
|
Vector<short_ptr<ZoneEvent>> vze = std::move( m_data.zoneVectorCache.back_and_pop() );
|
2019-03-26 21:06:00 +00:00
|
|
|
assert( !vze.empty() );
|
|
|
|
vze.clear();
|
|
|
|
vze.push_back_non_empty( zone );
|
|
|
|
m_data.zoneChildren.push_back( std::move( vze ) );
|
|
|
|
}
|
2018-07-22 14:05:50 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2020-01-24 01:17:38 +00:00
|
|
|
const auto backChild = back->Child();
|
2019-10-24 22:51:01 +00:00
|
|
|
assert( !m_data.zoneChildren[backChild].empty() );
|
|
|
|
m_data.zoneChildren[backChild].push_back_non_empty( zone );
|
2018-07-22 14:05:50 +00:00
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
td->stack.push_back_non_empty( zone );
|
|
|
|
}
|
2019-01-14 22:08:34 +00:00
|
|
|
|
|
|
|
td->zoneIdStack.push_back( td->nextZoneId );
|
|
|
|
td->nextZoneId = 0;
|
2019-10-24 22:33:44 +00:00
|
|
|
|
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
td->childTimeStack.push_back( 0 );
|
|
|
|
#endif
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2019-10-24 21:25:04 +00:00
|
|
|
void Worker::InsertLockEvent( LockMap& lockmap, LockEvent* lev, uint64_t thread, int64_t time )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2019-10-24 21:25:04 +00:00
|
|
|
if( m_data.lastTime < time ) m_data.lastTime = time;
|
2018-02-13 13:57:47 +00:00
|
|
|
|
|
|
|
NoticeThread( thread );
|
|
|
|
|
|
|
|
auto it = lockmap.threadMap.find( thread );
|
|
|
|
if( it == lockmap.threadMap.end() )
|
|
|
|
{
|
|
|
|
assert( lockmap.threadList.size() < MaxLockThreads );
|
|
|
|
it = lockmap.threadMap.emplace( thread, lockmap.threadList.size() ).first;
|
|
|
|
lockmap.threadList.emplace_back( thread );
|
|
|
|
}
|
|
|
|
lev->thread = it->second;
|
|
|
|
assert( lev->thread == it->second );
|
|
|
|
auto& timeline = lockmap.timeline;
|
|
|
|
if( timeline.empty() )
|
|
|
|
{
|
2019-03-16 13:18:43 +00:00
|
|
|
timeline.push_back( { lev } );
|
2018-02-13 13:57:47 +00:00
|
|
|
UpdateLockCount( lockmap, timeline.size() - 1 );
|
|
|
|
}
|
2019-08-12 11:51:01 +00:00
|
|
|
else
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2019-10-24 23:00:32 +00:00
|
|
|
assert( timeline.back().ptr->Time() <= time );
|
2019-03-16 13:18:43 +00:00
|
|
|
timeline.push_back_non_empty( { lev } );
|
2018-02-13 13:57:47 +00:00
|
|
|
UpdateLockCount( lockmap, timeline.size() - 1 );
|
|
|
|
}
|
2019-03-16 01:19:19 +00:00
|
|
|
|
|
|
|
auto& range = lockmap.range[it->second];
|
2019-10-24 21:25:04 +00:00
|
|
|
if( range.start > time ) range.start = time;
|
|
|
|
if( range.end < time ) range.end = time;
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
bool Worker::CheckString( uint64_t ptr )
{
    if( ptr == 0 ) return true;
    if( m_data.strings.find( ptr ) != m_data.strings.end() ) return true;

    m_data.strings.emplace( ptr, "???" );
    m_pendingStrings++;

    Query( ServerQueryString, ptr );
    return false;
}

void Worker::CheckThreadString( uint64_t id )
{
    if( m_data.threadNames.find( id ) != m_data.threadNames.end() ) return;

    m_data.threadNames.emplace( id, "???" );
    m_pendingThreads++;

    if( m_sock.IsValid() ) Query( ServerQueryThreadString, id );
}

void Worker::CheckFiberName( uint64_t id, uint64_t tid )
{
    if( m_data.threadNames.find( tid ) != m_data.threadNames.end() ) return;

    m_data.threadNames.emplace( tid, "???" );
    m_pendingFibers++;

    if( m_sock.IsValid() ) Query( ServerQueryFiberName, id );
}

void Worker::CheckExternalName( uint64_t id )
{
    if( m_data.externalNames.find( id ) != m_data.externalNames.end() ) return;

    m_data.externalNames.emplace( id, std::make_pair( "???", "???" ) );
    m_pendingExternalNames += 2;

    Query( ServerQueryExternalName, id );
}
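// The Check*() helpers above share one pattern: insert a "???" placeholder under the
// client-side key, bump the matching m_pending* counter, and fire a server query. The
// corresponding Add*() handlers further down replace the placeholder with the real string
// and decrement the counter; the Exec() loop refuses to finish a capture while any of
// these counters are non-zero, so every name is resolved before the trace is complete.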
2018-02-13 13:57:47 +00:00
|
|
|
void Worker::AddSourceLocation( const QueueSourceLocation& srcloc )
|
|
|
|
{
|
|
|
|
assert( m_pendingSourceLocation > 0 );
|
|
|
|
m_pendingSourceLocation--;
|
|
|
|
|
|
|
|
const auto ptr = m_sourceLocationQueue.front();
|
|
|
|
m_sourceLocationQueue.erase( m_sourceLocationQueue.begin() );
|
|
|
|
|
|
|
|
auto it = m_data.sourceLocation.find( ptr );
|
|
|
|
assert( it != m_data.sourceLocation.end() );
|
|
|
|
CheckString( srcloc.name );
|
2020-05-23 12:23:04 +00:00
|
|
|
if( CheckString( srcloc.file ) )
|
|
|
|
{
|
|
|
|
StringRef ref( StringRef::Ptr, srcloc.file );
|
2020-05-23 13:20:23 +00:00
|
|
|
if( srcloc.file != 0 && m_checkedFileStrings.find( ref ) == m_checkedFileStrings.end() && m_pendingFileStrings.find( ref ) == m_pendingFileStrings.end() )
|
2020-05-23 12:23:04 +00:00
|
|
|
{
|
|
|
|
CacheSource( ref );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
StringRef ref( StringRef::Ptr, srcloc.file );
|
|
|
|
assert( m_checkedFileStrings.find( ref ) == m_checkedFileStrings.end() );
|
|
|
|
if( m_pendingFileStrings.find( ref ) == m_pendingFileStrings.end() )
|
|
|
|
{
|
|
|
|
m_pendingFileStrings.emplace( ref );
|
|
|
|
}
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
CheckString( srcloc.function );
|
2019-05-10 18:20:08 +00:00
|
|
|
const uint32_t color = ( srcloc.r << 16 ) | ( srcloc.g << 8 ) | srcloc.b;
|
2022-06-16 13:29:39 +00:00
|
|
|
it->second = SourceLocation {{ srcloc.name == 0 ? StringRef() : StringRef( StringRef::Ptr, srcloc.name ), StringRef( StringRef::Ptr, srcloc.function ), StringRef( StringRef::Ptr, srcloc.file ), srcloc.line, color }};
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2019-11-07 00:29:11 +00:00
|
|
|
void Worker::AddSourceLocationPayload( uint64_t ptr, const char* data, size_t sz )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
const auto start = data;
|
|
|
|
|
2020-05-10 17:20:59 +00:00
|
|
|
assert( m_pendingSourceLocationPayload == 0 );
|
2018-02-13 13:57:47 +00:00
|
|
|
|
|
|
|
uint32_t color, line;
|
|
|
|
memcpy( &color, data, 4 );
|
|
|
|
memcpy( &line, data + 4, 4 );
|
|
|
|
data += 8;
|
|
|
|
auto end = data;
|
|
|
|
|
|
|
|
while( *end ) end++;
|
|
|
|
const auto func = StoreString( data, end - data );
|
|
|
|
end++;
|
|
|
|
|
|
|
|
data = end;
|
|
|
|
while( *end ) end++;
|
|
|
|
const auto source = StoreString( data, end - data );
|
|
|
|
end++;
|
|
|
|
|
|
|
|
const auto nsz = sz - ( end - start );
|
|
|
|
|
|
|
|
color = ( ( color & 0x00FF0000 ) >> 16 ) |
|
|
|
|
( ( color & 0x0000FF00 ) ) |
|
|
|
|
( ( color & 0x000000FF ) << 16 );
|
|
|
|
|
2022-06-16 13:29:39 +00:00
|
|
|
SourceLocation srcloc {{ nsz == 0 ? StringRef() : StringRef( StringRef::Idx, StoreString( end, nsz ).idx ), StringRef( StringRef::Idx, func.idx ), StringRef( StringRef::Idx, source.idx ), line, color }};
|
2018-02-13 13:57:47 +00:00
|
|
|
auto it = m_data.sourceLocationPayloadMap.find( &srcloc );
|
|
|
|
if( it == m_data.sourceLocationPayloadMap.end() )
|
|
|
|
{
|
|
|
|
auto slptr = m_slab.Alloc<SourceLocation>();
|
|
|
|
memcpy( slptr, &srcloc, sizeof( srcloc ) );
|
|
|
|
uint32_t idx = m_data.sourceLocationPayload.size();
|
|
|
|
m_data.sourceLocationPayloadMap.emplace( slptr, idx );
|
2020-05-10 17:20:59 +00:00
|
|
|
m_pendingSourceLocationPayload = -int16_t( idx + 1 );
|
2018-02-13 13:57:47 +00:00
|
|
|
m_data.sourceLocationPayload.push_back( slptr );
|
2019-10-25 19:29:55 +00:00
|
|
|
const auto key = -int16_t( idx + 1 );
|
2019-10-26 14:30:13 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
2019-10-25 19:29:55 +00:00
|
|
|
auto res = m_data.sourceLocationZones.emplace( key, SourceLocationZones() );
|
|
|
|
m_data.srclocZonesLast.first = key;
|
|
|
|
m_data.srclocZonesLast.second = &res.first->second;
|
2018-07-29 12:16:13 +00:00
|
|
|
#else
|
2019-10-26 14:30:13 +00:00
|
|
|
auto res = m_data.sourceLocationZonesCnt.emplace( key, 0 );
|
|
|
|
m_data.srclocCntLast.first = key;
|
|
|
|
m_data.srclocCntLast.second = &res.first->second;
|
2018-03-18 11:55:54 +00:00
|
|
|
#endif
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2020-05-10 17:20:59 +00:00
|
|
|
m_pendingSourceLocationPayload = -int16_t( it->second + 1 );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
void Worker::AddString( uint64_t ptr, const char* str, size_t sz )
{
    assert( m_pendingStrings > 0 );
    m_pendingStrings--;
    auto it = m_data.strings.find( ptr );
    assert( it != m_data.strings.end() && strcmp( it->second, "???" ) == 0 );
    const auto sl = StoreString( str, sz );
    it->second = sl.ptr;

    StringRef ref( StringRef::Ptr, ptr );
    auto sit = m_pendingFileStrings.find( ref );
    if( sit != m_pendingFileStrings.end() )
    {
        m_pendingFileStrings.erase( sit );
        CacheSource( ref );
    }
}

void Worker::AddThreadString( uint64_t id, const char* str, size_t sz )
{
    assert( m_pendingThreads > 0 );
    m_pendingThreads--;
    auto it = m_data.threadNames.find( id );
    assert( it != m_data.threadNames.end() && strcmp( it->second, "???" ) == 0 );
    const auto sl = StoreString( str, sz );
    it->second = sl.ptr;
}

void Worker::AddFiberName( uint64_t id, const char* str, size_t sz )
{
    assert( m_pendingFibers > 0 );
    m_pendingFibers--;
    auto it = m_data.fiberToThreadMap.find( id );
    assert( it != m_data.fiberToThreadMap.end() );
    auto tit = m_data.threadNames.find( it->second );
    assert( tit != m_data.threadNames.end() && strcmp( tit->second, "???" ) == 0 );
    const auto sl = StoreString( str, sz );
    tit->second = sl.ptr;
}

void Worker::AddSingleString( const char* str, size_t sz )
{
    assert( m_pendingSingleString.ptr == nullptr );
    m_pendingSingleString = StoreString( str, sz );
}

void Worker::AddSingleStringFailure( const char* str, size_t sz )
{
    // During failure dispatch, processing of most events is ignored, but string data is
    // still sent. Just ignore anything that was already in the staging area.
    m_pendingSingleString = StoreString( str, sz );
}

void Worker::AddSecondString( const char* str, size_t sz )
{
    assert( m_pendingSecondString.ptr == nullptr );
    m_pendingSecondString = StoreString( str, sz );
}
2019-11-07 00:29:11 +00:00
|
|
|
void Worker::AddExternalName( uint64_t ptr, const char* str, size_t sz )
|
2019-08-16 17:22:23 +00:00
|
|
|
{
|
|
|
|
assert( m_pendingExternalNames > 0 );
|
|
|
|
m_pendingExternalNames--;
|
|
|
|
auto it = m_data.externalNames.find( ptr );
|
2019-08-16 17:49:16 +00:00
|
|
|
assert( it != m_data.externalNames.end() && strcmp( it->second.first, "???" ) == 0 );
|
2019-08-16 17:22:23 +00:00
|
|
|
const auto sl = StoreString( str, sz );
|
2019-08-16 17:49:16 +00:00
|
|
|
it->second.first = sl.ptr;
|
|
|
|
}
|
|
|
|
|
2019-11-07 00:29:11 +00:00
|
|
|
void Worker::AddExternalThreadName( uint64_t ptr, const char* str, size_t sz )
|
2019-08-16 17:49:16 +00:00
|
|
|
{
|
|
|
|
assert( m_pendingExternalNames > 0 );
|
|
|
|
m_pendingExternalNames--;
|
|
|
|
auto it = m_data.externalNames.find( ptr );
|
|
|
|
assert( it != m_data.externalNames.end() && strcmp( it->second.second, "???" ) == 0 );
|
|
|
|
const auto sl = StoreString( str, sz );
|
|
|
|
it->second.second = sl.ptr;
|
2019-08-16 17:22:23 +00:00
|
|
|
}
|
|
|
|
|
2019-11-07 00:29:11 +00:00
|
|
|
void Worker::AddFrameImageData( uint64_t ptr, const char* data, size_t sz )
|
2019-06-06 19:39:54 +00:00
|
|
|
{
|
2020-05-10 18:16:08 +00:00
|
|
|
assert( m_pendingFrameImageData.image == nullptr );
|
2019-07-19 19:46:58 +00:00
|
|
|
assert( sz % 8 == 0 );
|
2019-11-06 22:29:59 +00:00
|
|
|
// Input data buffer cannot be changed, as it is used as LZ4 dictionary.
|
|
|
|
if( m_frameImageBufferSize < sz )
|
|
|
|
{
|
|
|
|
m_frameImageBufferSize = sz;
|
|
|
|
delete[] m_frameImageBuffer;
|
|
|
|
m_frameImageBuffer = new char[sz];
|
|
|
|
}
|
|
|
|
auto src = (uint8_t*)data;
|
|
|
|
auto dst = (uint8_t*)m_frameImageBuffer;
|
2020-04-05 12:05:43 +00:00
|
|
|
memcpy( dst, src, sz );
|
|
|
|
m_texcomp.FixOrder( (char*)dst, sz/8 );
|
2020-04-05 13:04:18 +00:00
|
|
|
m_texcomp.Rdo( (char*)dst, sz/8 );
|
2020-05-10 18:16:08 +00:00
|
|
|
m_pendingFrameImageData.image = m_texcomp.Pack( m_frameImageBuffer, sz, m_pendingFrameImageData.csz, m_slab );
|
2019-06-06 19:39:54 +00:00
|
|
|
}
|
|
|
|
|
2020-03-25 19:04:55 +00:00
|
|
|
void Worker::AddSymbolCode( uint64_t ptr, const char* data, size_t sz )
|
|
|
|
{
|
2021-11-25 21:35:43 +00:00
|
|
|
assert( m_pendingSymbolCode > 0 );
|
|
|
|
m_pendingSymbolCode--;
|
2020-03-25 19:04:55 +00:00
|
|
|
|
|
|
|
auto code = (char*)m_slab.AllocBig( sz );
|
|
|
|
memcpy( code, data, sz );
|
2020-05-23 12:05:11 +00:00
|
|
|
m_data.symbolCode.emplace( ptr, MemoryBlock{ code, uint32_t( sz ) } );
|
2020-03-25 19:04:55 +00:00
|
|
|
m_data.symbolCodeSize += sz;
|
2020-04-01 19:43:03 +00:00
|
|
|
|
|
|
|
if( m_data.cpuArch == CpuArchUnknown ) return;
|
|
|
|
csh handle;
|
|
|
|
cs_err rval = CS_ERR_ARCH;
|
|
|
|
switch( m_data.cpuArch )
|
|
|
|
{
|
|
|
|
case CpuArchX86:
|
|
|
|
rval = cs_open( CS_ARCH_X86, CS_MODE_32, &handle );
|
|
|
|
break;
|
|
|
|
case CpuArchX64:
|
|
|
|
rval = cs_open( CS_ARCH_X86, CS_MODE_64, &handle );
|
|
|
|
break;
|
|
|
|
case CpuArchArm32:
|
|
|
|
rval = cs_open( CS_ARCH_ARM, CS_MODE_ARM, &handle );
|
|
|
|
break;
|
|
|
|
case CpuArchArm64:
|
|
|
|
rval = cs_open( CS_ARCH_ARM64, CS_MODE_ARM, &handle );
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
assert( false );
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
if( rval != CS_ERR_OK ) return;
|
|
|
|
cs_insn* insn;
|
|
|
|
size_t cnt = cs_disasm( handle, (const uint8_t*)code, sz, ptr, 0, &insn );
|
|
|
|
if( cnt > 0 )
|
|
|
|
{
|
2020-04-01 20:37:19 +00:00
|
|
|
m_pendingCodeInformation += cnt;
|
2020-04-01 19:43:03 +00:00
|
|
|
for( size_t i=0; i<cnt; i++ )
|
|
|
|
{
|
|
|
|
Query( ServerQueryCodeLocation, insn[i].address );
|
|
|
|
}
|
|
|
|
cs_free( insn, cnt );
|
|
|
|
}
|
|
|
|
cs_close( &handle );
|
2020-03-25 19:04:55 +00:00
|
|
|
}
|
|
|
|
|
2021-02-03 23:03:25 +00:00
|
|
|
|
|
|
|
void Worker::AddSourceCode( const char* data, size_t sz )
|
|
|
|
{
|
|
|
|
assert( !m_sourceCodeQuery.empty() );
|
|
|
|
auto file = m_sourceCodeQuery.front();
|
|
|
|
m_sourceCodeQuery.erase( m_sourceCodeQuery.begin() );
|
|
|
|
if( m_data.sourceFileCache.find( file ) != m_data.sourceFileCache.end() ) return;
|
|
|
|
auto src = (char*)m_slab.AllocBig( sz );
|
|
|
|
memcpy( src, data, sz );
|
|
|
|
m_data.sourceFileCache.emplace( file, MemoryBlock{ src, uint32_t( sz ) } );
|
|
|
|
}
|
|
|
|
|
2020-05-10 14:56:13 +00:00
|
|
|
CallstackFrameId Worker::PackPointer( uint64_t ptr ) const
|
|
|
|
{
|
|
|
|
assert( ( ( ptr & 0x3000000000000000 ) << 2 ) == ( ptr & 0xC000000000000000 ) );
|
|
|
|
CallstackFrameId id;
|
|
|
|
id.idx = ptr;
|
|
|
|
id.sel = 0;
|
|
|
|
id.custom = 0;
|
|
|
|
return id;
|
|
|
|
}
|
|
|
|
|
2019-03-03 15:39:13 +00:00
|
|
|
uint64_t Worker::GetCanonicalPointer( const CallstackFrameId& id ) const
|
|
|
|
{
|
|
|
|
assert( id.sel == 0 );
|
2020-02-29 22:40:21 +00:00
|
|
|
return ( id.idx & 0x3FFFFFFFFFFFFFFF ) | ( ( id.idx & 0x3000000000000000 ) << 2 );
|
2019-03-03 15:39:13 +00:00
|
|
|
}
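// Note on the encoding above: PackPointer() stores a raw address in the idx bitfield of
// CallstackFrameId, so the two top pointer bits (63:62) must be recoverable from bits
// 61:60 -- that is exactly what the assert checks. GetCanonicalPointer() reverses the
// packing. The sketch below is illustrative only (not part of Tracy) and walks the
// round trip for a canonical x86-64 kernel-space address:
//
//     uint64_t ptr = 0xFFFF800012345678;          // bits 63:62 equal bits 61:60
//     CallstackFrameId id;
//     id.idx = ptr; id.sel = 0; id.custom = 0;    // as in PackPointer()
//     uint64_t back = ( id.idx & 0x3FFFFFFFFFFFFFFF )
//                   | ( ( id.idx & 0x3000000000000000 ) << 2 );
//     assert( back == ptr );                      // canonical pointer restored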
|
|
|
|
|
2019-11-07 00:29:11 +00:00
|
|
|
void Worker::AddCallstackPayload( uint64_t ptr, const char* _data, size_t _sz )
|
2018-06-19 19:15:36 +00:00
|
|
|
{
|
2020-09-29 15:04:41 +00:00
|
|
|
assert( m_pendingCallstackId == 0 );
|
2018-06-19 19:15:36 +00:00
|
|
|
|
2019-03-03 15:50:18 +00:00
|
|
|
const auto sz = _sz / sizeof( uint64_t );
|
|
|
|
const auto memsize = sizeof( VarArray<CallstackFrameId> ) + sz * sizeof( CallstackFrameId );
|
2018-06-19 19:15:36 +00:00
|
|
|
auto mem = (char*)m_slab.AllocRaw( memsize );
|
|
|
|
|
2019-03-03 15:50:18 +00:00
|
|
|
auto data = (CallstackFrameId*)mem;
|
|
|
|
auto dst = data;
|
|
|
|
auto src = (uint64_t*)_data;
|
|
|
|
for( size_t i=0; i<sz; i++ )
|
|
|
|
{
|
|
|
|
*dst++ = PackPointer( *src++ );
|
|
|
|
}
|
2018-06-19 19:15:36 +00:00
|
|
|
|
2019-03-03 15:50:18 +00:00
|
|
|
auto arr = (VarArray<CallstackFrameId>*)( mem + sz * sizeof( CallstackFrameId ) );
|
|
|
|
new(arr) VarArray<CallstackFrameId>( sz, data );
|
2018-06-19 19:15:36 +00:00
|
|
|
|
2019-03-03 16:34:56 +00:00
|
|
|
uint32_t idx;
|
|
|
|
auto it = m_data.callstackMap.find( arr );
|
|
|
|
if( it == m_data.callstackMap.end() )
|
|
|
|
{
|
|
|
|
idx = m_data.callstackPayload.size();
|
|
|
|
m_data.callstackMap.emplace( arr, idx );
|
|
|
|
m_data.callstackPayload.push_back( arr );
|
|
|
|
|
|
|
|
for( auto& frame : *arr )
|
|
|
|
{
|
|
|
|
auto fit = m_data.callstackFrameMap.find( frame );
|
|
|
|
if( fit == m_data.callstackFrameMap.end() )
|
|
|
|
{
|
|
|
|
m_pendingCallstackFrames++;
|
2019-04-01 16:52:32 +00:00
|
|
|
Query( ServerQueryCallstackFrame, GetCanonicalPointer( frame ) );
|
2019-03-03 16:34:56 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
idx = it->second;
|
|
|
|
m_slab.Unalloc( memsize );
|
|
|
|
}
|
|
|
|
|
2019-03-05 18:30:17 +00:00
|
|
|
m_pendingCallstackId = idx;
|
2019-03-03 16:34:56 +00:00
|
|
|
}
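// Layout and deduplication used above: each callstack is stored as one slab allocation
// holding the frame ids followed by the VarArray header that points back at them, e.g.
// for a 3-frame stack:
//
//     mem:  [ id0 ][ id1 ][ id2 ][ VarArray<CallstackFrameId>{ size=3, data=mem } ]
//
// The VarArray is hashed into m_data.callstackMap; if an identical stack was already
// recorded, the fresh allocation is handed back via Unalloc() and the existing index is
// reused. Frame ids seen for the first time additionally trigger a
// ServerQueryCallstackFrame round trip so their symbol data can arrive asynchronously.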
|
|
|
|
|
2019-11-07 00:29:11 +00:00
|
|
|
void Worker::AddCallstackAllocPayload( uint64_t ptr, const char* data, size_t _sz )
|
2019-03-03 16:34:56 +00:00
|
|
|
{
|
|
|
|
CallstackFrameId stack[64];
|
2020-07-05 15:33:29 +00:00
|
|
|
uint8_t sz;
|
|
|
|
memcpy( &sz, data, 1 ); data++;
|
2019-03-03 16:34:56 +00:00
|
|
|
assert( sz <= 64 );
|
2020-07-05 15:33:29 +00:00
|
|
|
for( uint8_t i=0; i<sz; i++ )
|
2019-03-03 16:34:56 +00:00
|
|
|
{
|
2020-07-05 15:33:29 +00:00
|
|
|
uint16_t sz;
|
2019-03-03 16:34:56 +00:00
|
|
|
CallstackFrame cf;
|
|
|
|
memcpy( &cf.line, data, 4 ); data += 4;
|
2020-07-05 15:33:29 +00:00
|
|
|
memcpy( &sz, data, 2 ); data += 2;
|
2019-03-03 16:34:56 +00:00
|
|
|
cf.name = StoreString( data, sz ).idx; data += sz;
|
2020-07-05 15:33:29 +00:00
|
|
|
memcpy( &sz, data, 2 ); data += 2;
|
2019-03-03 16:34:56 +00:00
|
|
|
cf.file = StoreString( data, sz ).idx; data += sz;
|
2020-02-25 22:42:59 +00:00
|
|
|
cf.symAddr = 0;
|
2019-03-03 16:34:56 +00:00
|
|
|
CallstackFrameData cfd = { &cf, 1 };
|
|
|
|
|
|
|
|
CallstackFrameId id;
|
|
|
|
auto it = m_data.revFrameMap.find( &cfd );
|
|
|
|
if( it == m_data.revFrameMap.end() )
|
|
|
|
{
|
|
|
|
auto frame = m_slab.Alloc<CallstackFrame>();
|
|
|
|
memcpy( frame, &cf, sizeof( CallstackFrame ) );
|
2020-02-25 23:55:43 +00:00
|
|
|
auto frameData = m_slab.AllocInit<CallstackFrameData>();
|
2019-03-03 16:34:56 +00:00
|
|
|
frameData->data = frame;
|
|
|
|
frameData->size = 1;
|
|
|
|
id.idx = m_callstackAllocNextIdx++;
|
|
|
|
id.sel = 1;
|
2020-02-29 15:24:15 +00:00
|
|
|
id.custom = 0;
|
2019-03-03 16:34:56 +00:00
|
|
|
m_data.callstackFrameMap.emplace( id, frameData );
|
|
|
|
m_data.revFrameMap.emplace( frameData, id );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
id = it->second;
|
|
|
|
}
|
|
|
|
stack[i] = id;
|
|
|
|
}
|
|
|
|
|
2019-11-25 21:54:10 +00:00
|
|
|
VarArray<CallstackFrameId>* arr;
|
|
|
|
size_t memsize;
|
2020-09-29 15:04:41 +00:00
|
|
|
if( m_pendingCallstackId != 0 )
|
2019-11-25 21:54:10 +00:00
|
|
|
{
|
|
|
|
const auto nativeCs = m_data.callstackPayload[m_pendingCallstackId];
|
|
|
|
const auto nsz = nativeCs->size();
|
|
|
|
const auto tsz = sz + nsz;
|
2019-03-05 18:43:44 +00:00
|
|
|
|
2019-11-25 21:54:10 +00:00
|
|
|
memsize = sizeof( VarArray<CallstackFrameId> ) + tsz * sizeof( CallstackFrameId );
|
|
|
|
auto mem = (char*)m_slab.AllocRaw( memsize );
|
|
|
|
memcpy( mem, stack, sizeof( CallstackFrameId ) * sz );
|
|
|
|
memcpy( mem + sizeof( CallstackFrameId ) * sz, nativeCs->data(), sizeof( CallstackFrameId ) * nsz );
|
|
|
|
|
|
|
|
arr = (VarArray<CallstackFrameId>*)( mem + tsz * sizeof( CallstackFrameId ) );
|
|
|
|
new(arr) VarArray<CallstackFrameId>( tsz, (CallstackFrameId*)mem );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
memsize = sizeof( VarArray<CallstackFrameId> ) + sz * sizeof( CallstackFrameId );
|
|
|
|
auto mem = (char*)m_slab.AllocRaw( memsize );
|
|
|
|
memcpy( mem, stack, sizeof( CallstackFrameId ) * sz );
|
2019-03-03 16:34:56 +00:00
|
|
|
|
2019-11-25 21:54:10 +00:00
|
|
|
arr = (VarArray<CallstackFrameId>*)( mem + sz * sizeof( CallstackFrameId ) );
|
|
|
|
new(arr) VarArray<CallstackFrameId>( sz, (CallstackFrameId*)mem );
|
|
|
|
}
|
2019-03-03 16:34:56 +00:00
|
|
|
|
2018-06-19 19:15:36 +00:00
|
|
|
uint32_t idx;
|
|
|
|
auto it = m_data.callstackMap.find( arr );
|
|
|
|
if( it == m_data.callstackMap.end() )
|
|
|
|
{
|
|
|
|
idx = m_data.callstackPayload.size();
|
|
|
|
m_data.callstackMap.emplace( arr, idx );
|
|
|
|
m_data.callstackPayload.push_back( arr );
|
2018-06-19 22:25:26 +00:00
|
|
|
|
|
|
|
for( auto& frame : *arr )
|
|
|
|
{
|
|
|
|
auto fit = m_data.callstackFrameMap.find( frame );
|
|
|
|
if( fit == m_data.callstackFrameMap.end() )
|
|
|
|
{
|
2018-06-20 21:42:00 +00:00
|
|
|
m_pendingCallstackFrames++;
|
2019-04-01 16:52:32 +00:00
|
|
|
Query( ServerQueryCallstackFrame, GetCanonicalPointer( frame ) );
|
2018-06-19 22:25:26 +00:00
|
|
|
}
|
|
|
|
}
|
2018-06-19 19:15:36 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
idx = it->second;
|
|
|
|
m_slab.Unalloc( memsize );
|
|
|
|
}
|
|
|
|
|
2019-03-05 18:43:44 +00:00
|
|
|
m_pendingCallstackId = idx;
|
2018-06-19 19:15:36 +00:00
|
|
|
}
|
|
|
|
|
2021-11-27 14:06:58 +00:00
|
|
|
uint32_t Worker::MergeCallstacks( uint32_t first, uint32_t second )
|
|
|
|
{
|
|
|
|
const auto& cs1 = GetCallstack( first );
|
|
|
|
const auto& cs2 = GetCallstack( second );
|
|
|
|
|
|
|
|
const auto sz1 = cs1.size();
|
|
|
|
const auto sz2 = cs2.size();
|
|
|
|
const auto tsz = sz1 + sz2;
|
|
|
|
|
|
|
|
size_t memsize = sizeof( VarArray<CallstackFrameId> ) + tsz * sizeof( CallstackFrameId );
|
|
|
|
auto mem = (char*)m_slab.AllocRaw( memsize );
|
|
|
|
memcpy( mem, cs1.data(), sizeof( CallstackFrameId ) * sz1 );
|
|
|
|
memcpy( mem + sizeof( CallstackFrameId ) * sz1, cs2.data(), sizeof( CallstackFrameId ) * sz2 );
|
|
|
|
|
|
|
|
VarArray<CallstackFrameId>* arr = (VarArray<CallstackFrameId>*)( mem + tsz * sizeof( CallstackFrameId ) );
|
|
|
|
new(arr) VarArray<CallstackFrameId>( tsz, (CallstackFrameId*)mem );
|
|
|
|
|
|
|
|
uint32_t idx;
|
|
|
|
auto it = m_data.callstackMap.find( arr );
|
|
|
|
if( it == m_data.callstackMap.end() )
|
|
|
|
{
|
|
|
|
idx = m_data.callstackPayload.size();
|
|
|
|
m_data.callstackMap.emplace( arr, idx );
|
|
|
|
m_data.callstackPayload.push_back( arr );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
idx = it->second;
|
|
|
|
m_slab.Unalloc( memsize );
|
|
|
|
}
|
|
|
|
return idx;
|
|
|
|
}
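// MergeCallstacks() reuses the storage scheme from AddCallstackPayload(): the frames of
// 'first' followed by the frames of 'second' are copied into a single allocation,
// wrapped in a VarArray and deduplicated through m_data.callstackMap, so merging the
// same pair twice always returns the same payload index and the duplicate allocation
// goes back to the slab.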
|
|
|
|
|
2018-02-13 13:57:47 +00:00
|
|
|
void Worker::InsertPlot( PlotData* plot, int64_t time, double val )
|
|
|
|
{
|
|
|
|
if( plot->data.empty() )
|
|
|
|
{
|
|
|
|
plot->min = val;
|
|
|
|
plot->max = val;
|
2021-10-17 11:04:56 +00:00
|
|
|
plot->sum = val;
|
2019-11-03 13:50:11 +00:00
|
|
|
plot->data.push_back( { Int48( time ), val } );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
if( plot->min > val ) plot->min = val;
|
|
|
|
else if( plot->max < val ) plot->max = val;
|
2021-10-17 11:04:56 +00:00
|
|
|
plot->sum += val;
|
2021-02-07 14:52:08 +00:00
|
|
|
plot->data.push_back( { Int48( time ), val } );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
}
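// InsertPlot() keeps the per-plot aggregates (min, max, sum) up to date as points are
// appended, so value ranges never have to be recomputed from the raw samples. Points
// may arrive out of order; the data vector is only sorted lazily, in
// DoPostponedWorkAll() below.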
|
|
|
|
|
2019-11-07 00:29:11 +00:00
|
|
|
void Worker::HandlePlotName( uint64_t name, const char* str, size_t sz )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
const auto sl = StoreString( str, sz );
|
2018-08-04 15:10:45 +00:00
|
|
|
m_data.plots.StringDiscovered( name, sl, m_data.strings, [this] ( PlotData* dst, PlotData* src ) {
|
2018-08-04 14:33:03 +00:00
|
|
|
for( auto& v : src->data )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2019-11-03 13:50:11 +00:00
|
|
|
InsertPlot( dst, v.time.Val(), v.val );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
2018-08-04 14:33:03 +00:00
|
|
|
} );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2019-11-07 00:29:11 +00:00
|
|
|
void Worker::HandleFrameName( uint64_t name, const char* str, size_t sz )
|
2018-08-04 18:48:21 +00:00
|
|
|
{
|
|
|
|
const auto sl = StoreString( str, sz );
|
2019-01-06 20:11:36 +00:00
|
|
|
m_data.frames.StringDiscovered( name, sl, m_data.strings, [] ( FrameData* dst, FrameData* src ) {
|
2018-08-04 18:48:21 +00:00
|
|
|
auto sz = dst->frames.size();
|
|
|
|
dst->frames.insert( dst->frames.end(), src->frames.begin(), src->frames.end() );
|
2018-08-05 00:09:59 +00:00
|
|
|
std::inplace_merge( dst->frames.begin(), dst->frames.begin() + sz, dst->frames.end(), [] ( const auto& lhs, const auto& rhs ) { return lhs.start < rhs.start; } );
|
2018-08-04 18:48:21 +00:00
|
|
|
} );
|
|
|
|
}
|
|
|
|
|
2021-11-14 12:05:05 +00:00
|
|
|
void Worker::DoPostponedSymbols()
|
|
|
|
{
|
|
|
|
if( m_data.newSymbolsIndex >= 0 )
|
|
|
|
{
|
|
|
|
#ifdef NO_PARALLEL_SORT
|
|
|
|
pdqsort_branchless( m_data.symbolLoc.begin() + m_data.newSymbolsIndex, m_data.symbolLoc.end(), [] ( const auto& l, const auto& r ) { return l.addr < r.addr; } );
|
|
|
|
#else
|
|
|
|
std::sort( std::execution::par_unseq, m_data.symbolLoc.begin() + m_data.newSymbolsIndex, m_data.symbolLoc.end(), [] ( const auto& l, const auto& r ) { return l.addr < r.addr; } );
|
|
|
|
#endif
|
|
|
|
const auto ms = std::lower_bound( m_data.symbolLoc.begin(), m_data.symbolLoc.begin() + m_data.newSymbolsIndex, m_data.symbolLoc[m_data.newSymbolsIndex], [] ( const auto& l, const auto& r ) { return l.addr < r.addr; } );
|
|
|
|
std::inplace_merge( ms, m_data.symbolLoc.begin() + m_data.newSymbolsIndex, m_data.symbolLoc.end(), [] ( const auto& l, const auto& r ) { return l.addr < r.addr; } );
|
|
|
|
m_data.newSymbolsIndex = -1;
|
|
|
|
}
|
|
|
|
}
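// The pattern above (and in DoPostponedInlineSymbols() below) appends new entries
// unsorted, remembers where the sorted prefix ends (newSymbolsIndex), sorts only the
// new tail and then merges it back in. A tiny illustrative walk-through, not actual
// Tracy data:
//
//     sorted prefix : [ 10, 20, 30 ]        newSymbolsIndex = 3
//     appended tail : [ 25, 15 ]            sort tail -> [ 15, 25 ]
//     lower_bound( 15 ) in the prefix points at 20,
//     inplace_merge -> [ 10, 15, 20, 25, 30 ]
//
// The lower_bound skips the part of the prefix that is already smaller than anything
// new, so the whole table never has to be re-sorted.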
|
|
|
|
|
|
|
|
void Worker::DoPostponedInlineSymbols()
|
|
|
|
{
|
|
|
|
if( m_data.newInlineSymbolsIndex >= 0 )
|
|
|
|
{
|
|
|
|
#ifdef NO_PARALLEL_SORT
|
|
|
|
pdqsort_branchless( m_data.symbolLocInline.begin() + m_data.newInlineSymbolsIndex, m_data.symbolLocInline.end() );
|
|
|
|
#else
|
|
|
|
std::sort( std::execution::par_unseq, m_data.symbolLocInline.begin() + m_data.newInlineSymbolsIndex, m_data.symbolLocInline.end() );
|
|
|
|
#endif
|
|
|
|
const auto ms = std::lower_bound( m_data.symbolLocInline.begin(), m_data.symbolLocInline.begin() + m_data.newInlineSymbolsIndex, m_data.symbolLocInline[m_data.newInlineSymbolsIndex] );
|
|
|
|
std::inplace_merge( ms, m_data.symbolLocInline.begin() + m_data.newInlineSymbolsIndex, m_data.symbolLocInline.end() );
|
|
|
|
m_data.newInlineSymbolsIndex = -1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-11-14 12:01:27 +00:00
|
|
|
void Worker::DoPostponedWorkAll()
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2021-11-14 12:01:27 +00:00
|
|
|
DoPostponedWork();
|
2021-11-14 12:05:05 +00:00
|
|
|
DoPostponedSymbols();
|
|
|
|
DoPostponedInlineSymbols();
|
2021-11-14 12:01:27 +00:00
|
|
|
|
2018-08-04 14:33:03 +00:00
|
|
|
for( auto& plot : m_data.plots.Data() )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2021-02-07 14:52:08 +00:00
|
|
|
if( !plot->data.is_sorted() ) plot->data.sort();
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
2021-11-14 12:01:27 +00:00
|
|
|
}
|
2021-02-07 17:29:29 +00:00
|
|
|
|
2021-11-14 12:01:27 +00:00
|
|
|
void Worker::DoPostponedWork()
|
|
|
|
{
|
2021-02-07 17:29:29 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
if( m_data.newFramesWereReceived )
|
|
|
|
{
|
|
|
|
HandlePostponedSamples();
|
|
|
|
HandlePostponedGhostZones();
|
|
|
|
m_data.newFramesWereReceived = false;
|
|
|
|
}
|
2021-02-07 18:35:53 +00:00
|
|
|
|
2021-12-04 14:16:17 +00:00
|
|
|
if( m_identifySamples && m_data.newContextSwitchesReceived )
|
2021-11-13 01:44:54 +00:00
|
|
|
{
|
|
|
|
for( auto& td : m_data.threads )
|
|
|
|
{
|
|
|
|
if( !td->postponedSamples.empty() )
|
|
|
|
{
|
|
|
|
auto ctx = GetContextSwitchData( td->id );
|
|
|
|
if( ctx )
|
|
|
|
{
|
2021-11-14 22:48:50 +00:00
|
|
|
td->postponedSamples.ensure_sorted();
|
2021-11-13 01:44:54 +00:00
|
|
|
auto sit = td->postponedSamples.begin();
|
|
|
|
auto cit = std::lower_bound( ctx->v.begin(), ctx->v.end(), sit->time.Val(), [] ( const auto& l, const auto& r ) { return (uint64_t)l.End() < (uint64_t)r; } );
|
|
|
|
if( cit != ctx->v.end() )
|
|
|
|
{
|
|
|
|
do
|
|
|
|
{
|
|
|
|
if( sit->time.Val() == cit->Start() )
|
|
|
|
{
|
|
|
|
td->ctxSwitchSamples.push_back( *sit );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
ProcessCallstackSampleImplStats( *sit, *td );
|
|
|
|
}
|
|
|
|
if( ++sit == td->postponedSamples.end() ) break;
|
|
|
|
cit = std::lower_bound( cit, ctx->v.end(), sit->time.Val(), [] ( const auto& l, const auto& r ) { return (uint64_t)l.End() < (uint64_t)r; } );
|
|
|
|
}
|
|
|
|
while( cit != ctx->v.end() );
|
2021-11-14 22:55:44 +00:00
|
|
|
if( sit == td->postponedSamples.end() )
|
|
|
|
{
|
|
|
|
td->postponedSamples.clear();
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
td->postponedSamples.erase( td->postponedSamples.begin(), sit );
|
|
|
|
}
|
2021-11-13 01:44:54 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
m_data.newContextSwitchesReceived = false;
|
|
|
|
}
|
2021-02-07 17:29:29 +00:00
|
|
|
#endif
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2020-02-29 18:31:51 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
2020-02-29 13:12:04 +00:00
|
|
|
void Worker::HandlePostponedSamples()
|
|
|
|
{
|
2020-05-31 12:31:39 +00:00
|
|
|
assert( m_data.newFramesWereReceived );
|
2020-02-29 13:12:04 +00:00
|
|
|
if( m_data.postponedSamples.empty() ) return;
|
|
|
|
auto it = m_data.postponedSamples.begin();
|
|
|
|
do
|
|
|
|
{
|
|
|
|
UpdateSampleStatisticsPostponed( it );
|
|
|
|
}
|
|
|
|
while( it != m_data.postponedSamples.end() );
|
|
|
|
}
|
2020-05-31 12:31:39 +00:00
|
|
|
|
2020-05-31 19:17:21 +00:00
|
|
|
void Worker::GetStackWithInlines( Vector<InlineStackData>& ret, const VarArray<CallstackFrameId>& cs )
|
2020-05-31 12:51:33 +00:00
|
|
|
{
|
2020-05-31 19:17:21 +00:00
|
|
|
ret.clear();
|
2020-05-31 12:51:33 +00:00
|
|
|
int idx = cs.size() - 1;
|
|
|
|
do
|
|
|
|
{
|
|
|
|
auto& entry = cs[idx];
|
2020-05-31 19:17:21 +00:00
|
|
|
const auto frame = GetCallstackFrame( entry );
|
|
|
|
if( frame )
|
|
|
|
{
|
|
|
|
uint8_t i = frame->size;
|
|
|
|
do
|
|
|
|
{
|
|
|
|
i--;
|
|
|
|
ret.push_back( InlineStackData { frame->data[i].symAddr, entry, i } );
|
|
|
|
}
|
|
|
|
while( i != 0 );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
ret.push_back( InlineStackData{ GetCanonicalPointer( entry ), entry, 0 } );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
while( idx-- > 0 );
|
|
|
|
}
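// GetStackWithInlines() expands a callstack into one entry per inline level: frames are
// walked from the last array entry (the outermost caller) towards the first, and for
// each frame the inline records are emitted outermost-first, so the resulting vector
// runs from the root caller down to the innermost inlined function. Frames whose symbol
// data has not arrived yet fall back to their canonical address with inline level 0.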
|
|
|
|
|
|
|
|
int Worker::AddGhostZone( const VarArray<CallstackFrameId>& cs, Vector<GhostZone>* vec, uint64_t t )
|
|
|
|
{
|
|
|
|
static Vector<InlineStackData> stack;
|
|
|
|
GetStackWithInlines( stack, cs );
|
|
|
|
|
2020-06-11 10:18:44 +00:00
|
|
|
if( !vec->empty() && vec->back().end.Val() > (int64_t)t )
|
|
|
|
{
|
2020-06-15 15:41:00 +00:00
|
|
|
const auto refBackTime = vec->back().end.Val();
|
2020-06-11 10:18:44 +00:00
|
|
|
auto tmp = vec;
|
2020-06-11 10:27:17 +00:00
|
|
|
for(;;)
|
2020-06-11 10:18:44 +00:00
|
|
|
{
|
|
|
|
auto& back = tmp->back();
|
2020-06-15 15:41:00 +00:00
|
|
|
if( back.end.Val() != refBackTime ) break;
|
2020-06-11 10:18:44 +00:00
|
|
|
back.end.SetVal( t );
|
|
|
|
if( back.child < 0 ) break;
|
|
|
|
tmp = &m_data.ghostChildren[back.child];
|
|
|
|
}
|
|
|
|
}
|
2020-10-02 17:30:01 +00:00
|
|
|
const int64_t refBackTime = vec->empty() ? 0 : vec->back().end.Val();
|
2020-05-31 19:17:21 +00:00
|
|
|
int gcnt = 0;
|
2020-10-02 17:30:01 +00:00
|
|
|
size_t idx = 0;
|
2020-05-31 19:17:21 +00:00
|
|
|
while( !vec->empty() && idx < stack.size() )
|
|
|
|
{
|
|
|
|
auto& back = vec->back();
|
|
|
|
const auto& backKey = m_data.ghostFrames[back.frame.Val()];
|
|
|
|
const auto backFrame = GetCallstackFrame( backKey.frame );
|
|
|
|
if( !backFrame ) break;
|
|
|
|
const auto& inlineFrame = backFrame->data[backKey.inlineFrame];
|
|
|
|
if( inlineFrame.symAddr != stack[idx].symAddr ) break;
|
|
|
|
if( back.end.Val() != refBackTime ) break;
|
|
|
|
back.end.SetVal( t + m_samplingPeriod );
|
2020-06-11 10:27:17 +00:00
|
|
|
if( ++idx == stack.size() ) break;
|
2020-05-31 19:17:21 +00:00
|
|
|
if( back.child < 0 )
|
|
|
|
{
|
|
|
|
back.child = m_data.ghostChildren.size();
|
|
|
|
vec = &m_data.ghostChildren.push_next();
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
vec = &m_data.ghostChildren[back.child];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
while( idx < stack.size() )
|
|
|
|
{
|
|
|
|
gcnt++;
|
2020-05-31 12:51:33 +00:00
|
|
|
uint32_t fid;
|
2020-05-31 19:17:21 +00:00
|
|
|
GhostKey key { stack[idx].frame, stack[idx].inlineFrame };
|
|
|
|
auto it = m_data.ghostFramesMap.find( key );
|
2020-05-31 12:51:33 +00:00
|
|
|
if( it == m_data.ghostFramesMap.end() )
|
|
|
|
{
|
|
|
|
fid = uint32_t( m_data.ghostFrames.size() );
|
2020-05-31 19:17:21 +00:00
|
|
|
m_data.ghostFrames.push_back( key );
|
|
|
|
m_data.ghostFramesMap.emplace( key, fid );
|
2020-05-31 12:51:33 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
fid = it->second;
|
|
|
|
}
|
2020-05-31 19:17:21 +00:00
|
|
|
auto& zone = vec->push_next();
|
|
|
|
zone.start.SetVal( t );
|
|
|
|
zone.end.SetVal( t + m_samplingPeriod );
|
|
|
|
zone.frame.SetVal( fid );
|
|
|
|
if( ++idx == stack.size() )
|
2020-05-31 12:51:33 +00:00
|
|
|
{
|
|
|
|
zone.child = -1;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2020-05-31 19:17:21 +00:00
|
|
|
zone.child = m_data.ghostChildren.size();
|
|
|
|
vec = &m_data.ghostChildren.push_next();
|
2020-05-31 12:51:33 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
return gcnt;
|
|
|
|
}
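// AddGhostZone() turns a single sampled callstack into "ghost" zones. A chain that
// overshoots the new sample time is first clipped back to t; then the expanded stack is
// matched against the tip of the existing chain and, as long as the frames agree, the
// open zones are simply extended to t + m_samplingPeriod. At the first mismatch a new
// chain of child zones is created, each spanning [t, t + m_samplingPeriod). GhostKey
// values are interned through m_data.ghostFrames / ghostFramesMap so repeated frames
// share one id. The return value is the number of newly created zones, which the caller
// adds to m_data.ghostCnt.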
|
|
|
|
|
2020-05-31 12:31:39 +00:00
|
|
|
void Worker::HandlePostponedGhostZones()
|
|
|
|
{
|
|
|
|
assert( m_data.newFramesWereReceived );
|
|
|
|
if( !m_data.ghostZonesPostponed ) return;
|
|
|
|
bool postponed = false;
|
|
|
|
for( auto& td : m_data.threads )
|
|
|
|
{
|
|
|
|
while( td->ghostIdx != td->samples.size() )
|
|
|
|
{
|
|
|
|
const auto& sample = td->samples[td->ghostIdx];
|
|
|
|
const auto& cs = GetCallstack( sample.callstack.Val() );
|
|
|
|
const auto cssz = cs.size();
|
|
|
|
|
|
|
|
uint16_t i;
|
|
|
|
for( i=0; i<cssz; i++ ) if( !GetCallstackFrame( cs[i] ) ) break;
|
|
|
|
if( i != cssz )
|
|
|
|
{
|
|
|
|
postponed = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
td->ghostIdx++;
|
2020-05-31 12:51:33 +00:00
|
|
|
m_data.ghostCnt += AddGhostZone( cs, &td->ghostZones, sample.time.Val() );
|
2020-05-31 12:31:39 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
m_data.ghostZonesPostponed = postponed;
|
|
|
|
}
|
2020-02-29 18:31:51 +00:00
|
|
|
#endif
|
2020-02-29 13:12:04 +00:00
|
|
|
|
2020-07-25 22:35:41 +00:00
|
|
|
uint32_t Worker::GetSingleStringIdx()
|
|
|
|
{
|
|
|
|
assert( m_pendingSingleString.ptr != nullptr );
|
|
|
|
const auto idx = m_pendingSingleString.idx;
|
|
|
|
m_pendingSingleString.ptr = nullptr;
|
|
|
|
return idx;
|
|
|
|
}
|
|
|
|
|
2020-07-25 23:32:49 +00:00
|
|
|
uint32_t Worker::GetSecondStringIdx()
|
|
|
|
{
|
|
|
|
assert( m_pendingSecondString.ptr != nullptr );
|
|
|
|
const auto idx = m_pendingSecondString.idx;
|
|
|
|
m_pendingSecondString.ptr = nullptr;
|
|
|
|
return idx;
|
|
|
|
}
|
|
|
|
|
2019-11-07 00:29:11 +00:00
|
|
|
StringLocation Worker::StoreString( const char* str, size_t sz )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
StringLocation ret;
|
2019-02-12 19:23:14 +00:00
|
|
|
charutil::StringKey key = { str, sz };
|
|
|
|
auto sit = m_data.stringMap.find( key );
|
2018-02-13 13:57:47 +00:00
|
|
|
if( sit == m_data.stringMap.end() )
|
|
|
|
{
|
|
|
|
auto ptr = m_slab.Alloc<char>( sz+1 );
|
2018-03-19 14:41:28 +00:00
|
|
|
memcpy( ptr, str, sz );
|
|
|
|
ptr[sz] = '\0';
|
2018-02-13 13:57:47 +00:00
|
|
|
ret.ptr = ptr;
|
|
|
|
ret.idx = m_data.stringData.size();
|
2019-02-12 19:23:14 +00:00
|
|
|
m_data.stringMap.emplace( charutil::StringKey { ptr, sz }, m_data.stringData.size() );
|
2018-02-13 13:57:47 +00:00
|
|
|
m_data.stringData.push_back( ptr );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2019-02-12 19:23:14 +00:00
|
|
|
ret.ptr = sit->first.ptr;
|
2018-02-13 13:57:47 +00:00
|
|
|
ret.idx = sit->second;
|
|
|
|
}
|
|
|
|
return ret;
|
|
|
|
}
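// StoreString() interns strings: identical character sequences map to a single
// slab-allocated, null-terminated copy and a stable index into m_data.stringData.
// Callers therefore compare and store StringIdx values instead of raw pointers, as the
// plot/frame name handlers above and the zone text handlers below do. Illustrative
// only:
//
//     const auto a = StoreString( "frame", 5 );
//     const auto b = StoreString( "frame", 5 );
//     assert( a.ptr == b.ptr && a.idx == b.idx );   // same interned entry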
|
|
|
|
|
2019-01-14 22:08:34 +00:00
|
|
|
bool Worker::Process( const QueueItem& ev )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
switch( ev.hdr.type )
|
|
|
|
{
|
2019-08-02 18:18:08 +00:00
|
|
|
case QueueType::ThreadContext:
|
|
|
|
ProcessThreadContext( ev.threadCtx );
|
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
case QueueType::ZoneBegin:
|
|
|
|
ProcessZoneBegin( ev.zoneBegin );
|
|
|
|
break;
|
2018-06-21 23:07:25 +00:00
|
|
|
case QueueType::ZoneBeginCallstack:
|
|
|
|
ProcessZoneBeginCallstack( ev.zoneBegin );
|
|
|
|
break;
|
2020-07-26 12:35:04 +00:00
|
|
|
case QueueType::ZoneBeginAllocSrcLoc:
|
2020-05-10 17:20:59 +00:00
|
|
|
ProcessZoneBeginAllocSrcLoc( ev.zoneBeginLean );
|
2018-02-13 13:57:47 +00:00
|
|
|
break;
|
2020-07-26 12:35:04 +00:00
|
|
|
case QueueType::ZoneBeginAllocSrcLocCallstack:
|
2020-05-10 17:20:59 +00:00
|
|
|
ProcessZoneBeginAllocSrcLocCallstack( ev.zoneBeginLean );
|
2019-03-03 16:47:26 +00:00
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
case QueueType::ZoneEnd:
|
2019-01-15 17:55:21 +00:00
|
|
|
ProcessZoneEnd( ev.zoneEnd );
|
2018-02-13 13:57:47 +00:00
|
|
|
break;
|
2019-01-14 21:56:10 +00:00
|
|
|
case QueueType::ZoneValidation:
|
|
|
|
ProcessZoneValidation( ev.zoneValidation );
|
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
case QueueType::FrameMarkMsg:
|
|
|
|
ProcessFrameMark( ev.frameMark );
|
|
|
|
break;
|
2018-08-05 00:09:59 +00:00
|
|
|
case QueueType::FrameMarkMsgStart:
|
|
|
|
ProcessFrameMarkStart( ev.frameMark );
|
|
|
|
break;
|
|
|
|
case QueueType::FrameMarkMsgEnd:
|
|
|
|
ProcessFrameMarkEnd( ev.frameMark );
|
|
|
|
break;
|
2020-07-26 12:18:48 +00:00
|
|
|
case QueueType::FrameImage:
|
|
|
|
ProcessFrameImage( ev.frameImage );
|
2019-06-06 19:39:54 +00:00
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
case QueueType::SourceLocation:
|
|
|
|
AddSourceLocation( ev.srcloc );
|
2019-04-01 17:37:39 +00:00
|
|
|
m_serverQuerySpaceLeft++;
|
2018-02-13 13:57:47 +00:00
|
|
|
break;
|
|
|
|
case QueueType::ZoneText:
|
2020-07-25 22:53:55 +00:00
|
|
|
ProcessZoneText();
|
2018-02-13 13:57:47 +00:00
|
|
|
break;
|
2018-06-29 14:12:17 +00:00
|
|
|
case QueueType::ZoneName:
|
2020-07-25 22:53:55 +00:00
|
|
|
ProcessZoneName();
|
2018-06-29 14:12:17 +00:00
|
|
|
break;
|
2020-11-27 11:37:35 +00:00
|
|
|
case QueueType::ZoneColor:
|
|
|
|
ProcessZoneColor( ev.zoneColor );
|
|
|
|
break;
|
2020-05-24 14:13:09 +00:00
|
|
|
case QueueType::ZoneValue:
|
|
|
|
ProcessZoneValue( ev.zoneValue );
|
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
case QueueType::LockAnnounce:
|
|
|
|
ProcessLockAnnounce( ev.lockAnnounce );
|
|
|
|
break;
|
2018-12-16 19:39:30 +00:00
|
|
|
case QueueType::LockTerminate:
|
|
|
|
ProcessLockTerminate( ev.lockTerminate );
|
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
case QueueType::LockWait:
|
|
|
|
ProcessLockWait( ev.lockWait );
|
|
|
|
break;
|
|
|
|
case QueueType::LockObtain:
|
|
|
|
ProcessLockObtain( ev.lockObtain );
|
|
|
|
break;
|
|
|
|
case QueueType::LockRelease:
|
|
|
|
ProcessLockRelease( ev.lockRelease );
|
|
|
|
break;
|
|
|
|
case QueueType::LockSharedWait:
|
|
|
|
ProcessLockSharedWait( ev.lockWait );
|
|
|
|
break;
|
|
|
|
case QueueType::LockSharedObtain:
|
|
|
|
ProcessLockSharedObtain( ev.lockObtain );
|
|
|
|
break;
|
|
|
|
case QueueType::LockSharedRelease:
|
2022-07-18 00:06:19 +00:00
|
|
|
ProcessLockSharedRelease( ev.lockReleaseShared );
|
2018-02-13 13:57:47 +00:00
|
|
|
break;
|
|
|
|
case QueueType::LockMark:
|
|
|
|
ProcessLockMark( ev.lockMark );
|
|
|
|
break;
|
2020-03-08 12:47:38 +00:00
|
|
|
case QueueType::LockName:
|
|
|
|
ProcessLockName( ev.lockName );
|
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
case QueueType::PlotData:
|
|
|
|
ProcessPlotData( ev.plotData );
|
|
|
|
break;
|
2019-11-05 17:02:08 +00:00
|
|
|
case QueueType::PlotConfig:
|
|
|
|
ProcessPlotConfig( ev.plotConfig );
|
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
case QueueType::Message:
|
|
|
|
ProcessMessage( ev.message );
|
|
|
|
break;
|
|
|
|
case QueueType::MessageLiteral:
|
2020-07-25 23:15:11 +00:00
|
|
|
ProcessMessageLiteral( ev.messageLiteral );
|
2018-02-13 13:57:47 +00:00
|
|
|
break;
|
2019-05-10 18:17:44 +00:00
|
|
|
case QueueType::MessageColor:
|
|
|
|
ProcessMessageColor( ev.messageColor );
|
|
|
|
break;
|
|
|
|
case QueueType::MessageLiteralColor:
|
2020-07-25 23:15:11 +00:00
|
|
|
ProcessMessageLiteralColor( ev.messageColorLiteral );
|
2019-05-10 18:17:44 +00:00
|
|
|
break;
|
2019-11-14 23:42:44 +00:00
|
|
|
case QueueType::MessageCallstack:
|
|
|
|
ProcessMessageCallstack( ev.message );
|
|
|
|
break;
|
|
|
|
case QueueType::MessageLiteralCallstack:
|
2020-07-25 23:15:11 +00:00
|
|
|
ProcessMessageLiteralCallstack( ev.messageLiteral );
|
2019-11-14 23:42:44 +00:00
|
|
|
break;
|
|
|
|
case QueueType::MessageColorCallstack:
|
|
|
|
ProcessMessageColorCallstack( ev.messageColor );
|
|
|
|
break;
|
|
|
|
case QueueType::MessageLiteralColorCallstack:
|
2020-07-25 23:15:11 +00:00
|
|
|
ProcessMessageLiteralColorCallstack( ev.messageColorLiteral );
|
2019-11-14 23:42:44 +00:00
|
|
|
break;
|
2019-07-12 16:30:45 +00:00
|
|
|
case QueueType::MessageAppInfo:
|
|
|
|
ProcessMessageAppInfo( ev.message );
|
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
case QueueType::GpuNewContext:
|
|
|
|
ProcessGpuNewContext( ev.gpuNewContext );
|
|
|
|
break;
|
|
|
|
case QueueType::GpuZoneBegin:
|
2019-10-23 22:04:31 +00:00
|
|
|
ProcessGpuZoneBegin( ev.gpuZoneBegin, false );
|
2018-02-13 13:57:47 +00:00
|
|
|
break;
|
2018-06-21 23:56:32 +00:00
|
|
|
case QueueType::GpuZoneBeginCallstack:
|
2019-10-23 22:04:31 +00:00
|
|
|
ProcessGpuZoneBeginCallstack( ev.gpuZoneBegin, false );
|
2018-06-21 23:56:32 +00:00
|
|
|
break;
|
2021-01-15 19:33:45 +00:00
|
|
|
case QueueType::GpuZoneBeginAllocSrcLoc:
|
|
|
|
ProcessGpuZoneBeginAllocSrcLoc( ev.gpuZoneBeginLean, false );
|
|
|
|
break;
|
|
|
|
case QueueType::GpuZoneBeginAllocSrcLocCallstack:
|
|
|
|
ProcessGpuZoneBeginAllocSrcLocCallstack( ev.gpuZoneBeginLean, false );
|
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
case QueueType::GpuZoneEnd:
|
2019-10-23 22:04:31 +00:00
|
|
|
ProcessGpuZoneEnd( ev.gpuZoneEnd, false );
|
|
|
|
break;
|
|
|
|
case QueueType::GpuZoneBeginSerial:
|
|
|
|
ProcessGpuZoneBegin( ev.gpuZoneBegin, true );
|
|
|
|
break;
|
|
|
|
case QueueType::GpuZoneBeginCallstackSerial:
|
|
|
|
ProcessGpuZoneBeginCallstack( ev.gpuZoneBegin, true );
|
|
|
|
break;
|
2021-01-15 19:33:45 +00:00
|
|
|
case QueueType::GpuZoneBeginAllocSrcLocSerial:
|
|
|
|
ProcessGpuZoneBeginAllocSrcLoc( ev.gpuZoneBeginLean, true );
|
|
|
|
break;
|
|
|
|
case QueueType::GpuZoneBeginAllocSrcLocCallstackSerial:
|
|
|
|
ProcessGpuZoneBeginAllocSrcLocCallstack( ev.gpuZoneBeginLean, true );
|
|
|
|
break;
|
2019-10-23 22:04:31 +00:00
|
|
|
case QueueType::GpuZoneEndSerial:
|
|
|
|
ProcessGpuZoneEnd( ev.gpuZoneEnd, true );
|
2018-02-13 13:57:47 +00:00
|
|
|
break;
|
|
|
|
case QueueType::GpuTime:
|
|
|
|
ProcessGpuTime( ev.gpuTime );
|
|
|
|
break;
|
2020-07-07 18:32:25 +00:00
|
|
|
case QueueType::GpuCalibration:
|
|
|
|
ProcessGpuCalibration( ev.gpuCalibration );
|
|
|
|
break;
|
2021-01-31 17:56:03 +00:00
|
|
|
case QueueType::GpuContextName:
|
|
|
|
ProcessGpuContextName( ev.gpuContextName );
|
|
|
|
break;
|
2018-03-31 19:56:05 +00:00
|
|
|
case QueueType::MemAlloc:
|
2018-04-01 00:03:34 +00:00
|
|
|
ProcessMemAlloc( ev.memAlloc );
|
2018-03-31 19:56:05 +00:00
|
|
|
break;
|
2020-09-23 23:23:10 +00:00
|
|
|
case QueueType::MemAllocNamed:
|
|
|
|
ProcessMemAllocNamed( ev.memAlloc );
|
|
|
|
break;
|
2018-03-31 19:56:05 +00:00
|
|
|
case QueueType::MemFree:
|
2018-04-01 00:03:34 +00:00
|
|
|
ProcessMemFree( ev.memFree );
|
2018-03-31 19:56:05 +00:00
|
|
|
break;
|
2020-09-23 23:23:10 +00:00
|
|
|
case QueueType::MemFreeNamed:
|
|
|
|
ProcessMemFreeNamed( ev.memFree );
|
|
|
|
break;
|
2018-06-19 16:52:45 +00:00
|
|
|
case QueueType::MemAllocCallstack:
|
|
|
|
ProcessMemAllocCallstack( ev.memAlloc );
|
|
|
|
break;
|
2020-09-23 23:23:10 +00:00
|
|
|
case QueueType::MemAllocCallstackNamed:
|
|
|
|
ProcessMemAllocCallstackNamed( ev.memAlloc );
|
|
|
|
break;
|
2018-06-19 16:52:45 +00:00
|
|
|
case QueueType::MemFreeCallstack:
|
|
|
|
ProcessMemFreeCallstack( ev.memFree );
|
|
|
|
break;
|
2020-09-23 23:23:10 +00:00
|
|
|
case QueueType::MemFreeCallstackNamed:
|
|
|
|
ProcessMemFreeCallstackNamed( ev.memFree );
|
|
|
|
break;
|
2021-01-15 19:49:39 +00:00
|
|
|
case QueueType::CallstackSerial:
|
|
|
|
ProcessCallstackSerial();
|
2018-06-19 16:52:45 +00:00
|
|
|
break;
|
2020-07-26 12:15:16 +00:00
|
|
|
case QueueType::Callstack:
|
2020-07-26 12:25:32 +00:00
|
|
|
case QueueType::CallstackAlloc:
|
2020-09-29 14:59:28 +00:00
|
|
|
ProcessCallstack();
|
2019-03-05 01:04:45 +00:00
|
|
|
break;
|
2020-07-26 12:28:13 +00:00
|
|
|
case QueueType::CallstackSample:
|
|
|
|
ProcessCallstackSample( ev.callstackSample );
|
2020-02-22 15:39:39 +00:00
|
|
|
break;
|
2021-12-21 13:18:32 +00:00
|
|
|
case QueueType::CallstackSampleContextSwitch:
|
|
|
|
ProcessCallstackSampleContextSwitch( ev.callstackSample );
|
|
|
|
break;
|
2019-01-20 18:11:48 +00:00
|
|
|
case QueueType::CallstackFrameSize:
|
|
|
|
ProcessCallstackFrameSize( ev.callstackFrameSize );
|
2019-04-01 17:37:39 +00:00
|
|
|
m_serverQuerySpaceLeft++;
|
2019-01-20 18:11:48 +00:00
|
|
|
break;
|
2018-06-19 23:07:09 +00:00
|
|
|
case QueueType::CallstackFrame:
|
2020-09-30 13:49:29 +00:00
|
|
|
ProcessCallstackFrame( ev.callstackFrame, true );
|
2018-06-19 23:07:09 +00:00
|
|
|
break;
|
2020-02-26 21:35:15 +00:00
|
|
|
case QueueType::SymbolInformation:
|
2020-02-27 11:49:48 +00:00
|
|
|
ProcessSymbolInformation( ev.symbolInformation );
|
2020-03-25 19:33:50 +00:00
|
|
|
m_serverQuerySpaceLeft++;
|
2020-02-26 21:35:15 +00:00
|
|
|
break;
|
2020-04-01 19:43:03 +00:00
|
|
|
case QueueType::CodeInformation:
|
2020-04-01 20:37:19 +00:00
|
|
|
ProcessCodeInformation( ev.codeInformation );
|
2020-04-01 19:43:03 +00:00
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
case QueueType::Terminate:
|
|
|
|
m_terminate = true;
|
|
|
|
break;
|
2018-07-10 19:33:22 +00:00
|
|
|
case QueueType::KeepAlive:
|
|
|
|
break;
|
2018-08-19 23:03:16 +00:00
|
|
|
case QueueType::Crash:
|
|
|
|
m_crashed = true;
|
|
|
|
break;
|
2018-08-20 00:07:31 +00:00
|
|
|
case QueueType::CrashReport:
|
|
|
|
ProcessCrashReport( ev.crashReport );
|
|
|
|
break;
|
2019-02-21 21:45:39 +00:00
|
|
|
case QueueType::SysTimeReport:
|
|
|
|
ProcessSysTime( ev.sysTime );
|
|
|
|
break;
|
2019-08-12 22:13:50 +00:00
|
|
|
case QueueType::ContextSwitch:
|
|
|
|
ProcessContextSwitch( ev.contextSwitch );
|
|
|
|
break;
|
2019-08-17 15:05:29 +00:00
|
|
|
case QueueType::ThreadWakeup:
|
|
|
|
ProcessThreadWakeup( ev.threadWakeup );
|
|
|
|
break;
|
2019-08-17 20:32:41 +00:00
|
|
|
case QueueType::TidToPid:
|
|
|
|
ProcessTidToPid( ev.tidToPid );
|
|
|
|
break;
|
2021-05-19 00:31:20 +00:00
|
|
|
case QueueType::HwSampleCpuCycle:
|
|
|
|
ProcessHwSampleCpuCycle( ev.hwSample );
|
|
|
|
break;
|
|
|
|
case QueueType::HwSampleInstructionRetired:
|
|
|
|
ProcessHwSampleInstructionRetired( ev.hwSample );
|
|
|
|
break;
|
2021-05-20 00:19:11 +00:00
|
|
|
case QueueType::HwSampleCacheReference:
|
|
|
|
ProcessHwSampleCacheReference( ev.hwSample );
|
|
|
|
break;
|
|
|
|
case QueueType::HwSampleCacheMiss:
|
|
|
|
ProcessHwSampleCacheMiss( ev.hwSample );
|
|
|
|
break;
|
|
|
|
case QueueType::HwSampleBranchRetired:
|
|
|
|
ProcessHwSampleBranchRetired( ev.hwSample );
|
|
|
|
break;
|
|
|
|
case QueueType::HwSampleBranchMiss:
|
|
|
|
ProcessHwSampleBranchMiss( ev.hwSample );
|
|
|
|
break;
|
2019-11-25 22:59:48 +00:00
|
|
|
case QueueType::ParamSetup:
|
|
|
|
ProcessParamSetup( ev.paramSetup );
|
|
|
|
break;
|
2021-02-03 20:47:03 +00:00
|
|
|
case QueueType::AckServerQueryNoop:
|
2020-03-25 19:37:26 +00:00
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
2021-02-03 23:03:25 +00:00
|
|
|
case QueueType::AckSourceCodeNotAvailable:
|
|
|
|
assert( !m_sourceCodeQuery.empty() );
|
|
|
|
m_sourceCodeQuery.erase( m_sourceCodeQuery.begin() );
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
2021-11-25 21:44:56 +00:00
|
|
|
case QueueType::AckSymbolCodeNotAvailable:
|
|
|
|
m_pendingSymbolCode--;
|
|
|
|
m_serverQuerySpaceLeft++;
|
|
|
|
break;
|
2019-11-29 21:41:41 +00:00
|
|
|
case QueueType::CpuTopology:
|
|
|
|
ProcessCpuTopology( ev.cpuTopology );
|
|
|
|
break;
|
2020-09-23 13:15:39 +00:00
|
|
|
case QueueType::MemNamePayload:
|
|
|
|
ProcessMemNamePayload( ev.memName );
|
|
|
|
break;
|
2021-11-02 00:53:10 +00:00
|
|
|
case QueueType::FiberEnter:
|
|
|
|
ProcessFiberEnter( ev.fiberEnter );
|
|
|
|
break;
|
|
|
|
case QueueType::FiberLeave:
|
|
|
|
ProcessFiberLeave( ev.fiberLeave );
|
|
|
|
break;
|
2018-02-13 13:57:47 +00:00
|
|
|
default:
|
|
|
|
assert( false );
|
|
|
|
break;
|
|
|
|
}
|
2019-01-14 22:08:34 +00:00
|
|
|
|
2019-01-15 17:55:21 +00:00
|
|
|
return m_failure == Failure::None;
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2019-08-02 18:18:08 +00:00
|
|
|
void Worker::ProcessThreadContext( const QueueThreadContext& ev )
|
|
|
|
{
|
2019-10-23 22:04:31 +00:00
|
|
|
m_refTimeThread = 0;
|
2019-11-10 16:17:07 +00:00
|
|
|
if( m_threadCtx != ev.thread )
|
|
|
|
{
|
|
|
|
m_threadCtx = ev.thread;
|
|
|
|
m_threadCtxData = RetrieveThread( ev.thread );
|
|
|
|
}
|
2019-08-02 18:18:08 +00:00
|
|
|
}
|
|
|
|
|
2022-06-21 23:55:10 +00:00
|
|
|
static tracy_force_inline int64_t RefTime( int64_t& reference, int64_t delta )
|
|
|
|
{
|
|
|
|
const auto refTime = reference + delta;
|
|
|
|
reference = refTime;
|
|
|
|
return refTime;
|
|
|
|
}
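// Zone timestamps arrive delta-encoded against a per-thread reference to keep the wire
// format small; RefTime() accumulates the delta and updates the reference in one step,
// and ProcessThreadContext() resets m_refTimeThread whenever a new thread context is
// announced. Illustrative only -- decoding deltas 100, 5, 7:
//
//     int64_t ref = 0;
//     RefTime( ref, 100 );   // returns 100, ref = 100
//     RefTime( ref, 5 );     // returns 105, ref = 105
//     RefTime( ref, 7 );     // returns 112, ref = 112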
|
|
|
|
|
2018-06-21 23:07:25 +00:00
|
|
|
void Worker::ProcessZoneBeginImpl( ZoneEvent* zone, const QueueZoneBegin& ev )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
CheckSourceLocation( ev.srcloc );
|
|
|
|
|
2022-06-21 23:55:10 +00:00
|
|
|
const auto start = TscTime( RefTime( m_refTimeThread, ev.time ) );
|
2020-02-12 19:16:14 +00:00
|
|
|
zone->SetStartSrcLoc( start, ShrinkSourceLocation( ev.srcloc ) );
|
2019-09-30 23:05:37 +00:00
|
|
|
zone->SetEnd( -1 );
|
|
|
|
zone->SetChild( -1 );
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2019-10-24 21:23:52 +00:00
|
|
|
if( m_data.lastTime < start ) m_data.lastTime = start;
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2021-11-06 17:55:29 +00:00
|
|
|
NewZone( zone );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2021-01-15 19:20:34 +00:00
|
|
|
void Worker::ProcessZoneBeginAllocSrcLocImpl( ZoneEvent* zone, const QueueZoneBeginLean& ev )
|
|
|
|
{
|
|
|
|
assert( m_pendingSourceLocationPayload != 0 );
|
|
|
|
|
2022-06-21 23:55:10 +00:00
|
|
|
const auto start = TscTime( RefTime( m_refTimeThread, ev.time ) );
|
2021-01-15 19:20:34 +00:00
|
|
|
zone->SetStartSrcLoc( start, m_pendingSourceLocationPayload );
|
|
|
|
zone->SetEnd( -1 );
|
|
|
|
zone->SetChild( -1 );
|
|
|
|
|
|
|
|
if( m_data.lastTime < start ) m_data.lastTime = start;
|
|
|
|
|
2021-11-06 17:55:29 +00:00
|
|
|
NewZone( zone );
|
2021-01-15 19:20:34 +00:00
|
|
|
|
|
|
|
m_pendingSourceLocationPayload = 0;
|
|
|
|
}
|
|
|
|
|
2019-11-10 20:26:57 +00:00
|
|
|
ZoneEvent* Worker::AllocZoneEvent()
|
|
|
|
{
|
|
|
|
ZoneEvent* ret;
|
2019-11-10 23:04:45 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
ret = m_slab.Alloc<ZoneEvent>();
|
|
|
|
#else
|
2019-11-10 20:26:57 +00:00
|
|
|
if( m_zoneEventPool.empty() )
|
|
|
|
{
|
|
|
|
ret = m_slab.Alloc<ZoneEvent>();
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
ret = m_zoneEventPool.back_and_pop();
|
|
|
|
}
|
2019-11-10 23:04:45 +00:00
|
|
|
#endif
|
2020-01-26 14:57:55 +00:00
|
|
|
ret->extra = 0;
|
2019-11-10 20:26:57 +00:00
|
|
|
return ret;
|
|
|
|
}
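// AllocZoneEvent() has two allocation strategies: with statistics enabled every zone
// event lives for the whole session, so memory comes straight from the slab; with
// TRACY_NO_STATISTICS, events that were copied into the shrunken child vectors (see
// ProcessZoneEnd() below) are recycled through m_zoneEventPool before new slab memory
// is touched. In both cases the extra index is cleared before the event is handed out.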
|
|
|
|
|
2018-06-21 23:07:25 +00:00
|
|
|
void Worker::ProcessZoneBegin( const QueueZoneBegin& ev )
|
|
|
|
{
|
2019-11-10 20:26:57 +00:00
|
|
|
auto zone = AllocZoneEvent();
|
2018-06-21 23:07:25 +00:00
|
|
|
ProcessZoneBeginImpl( zone, ev );
|
|
|
|
}
|
|
|
|
|
|
|
|
void Worker::ProcessZoneBeginCallstack( const QueueZoneBegin& ev )
|
|
|
|
{
|
2019-11-10 20:26:57 +00:00
|
|
|
auto zone = AllocZoneEvent();
|
2018-06-21 23:07:25 +00:00
|
|
|
ProcessZoneBeginImpl( zone, ev );
|
2021-11-06 17:55:29 +00:00
|
|
|
auto td = GetCurrentThreadData();
|
|
|
|
auto it = m_nextCallstack.find( td->id );
|
2020-09-29 14:59:28 +00:00
|
|
|
assert( it != m_nextCallstack.end() );
|
|
|
|
auto& extra = RequestZoneExtra( *zone );
|
|
|
|
extra.callstack.SetVal( it->second );
|
|
|
|
it->second = 0;
|
2018-06-21 23:07:25 +00:00
|
|
|
}
|
|
|
|
|
2020-05-10 17:20:59 +00:00
|
|
|
void Worker::ProcessZoneBeginAllocSrcLoc( const QueueZoneBeginLean& ev )
|
2019-03-03 16:47:26 +00:00
|
|
|
{
|
2019-11-10 20:26:57 +00:00
|
|
|
auto zone = AllocZoneEvent();
|
2019-03-03 16:47:26 +00:00
|
|
|
ProcessZoneBeginAllocSrcLocImpl( zone, ev );
|
|
|
|
}
|
|
|
|
|
2020-05-10 17:20:59 +00:00
|
|
|
void Worker::ProcessZoneBeginAllocSrcLocCallstack( const QueueZoneBeginLean& ev )
|
2019-03-03 16:47:26 +00:00
|
|
|
{
|
2019-11-10 20:26:57 +00:00
|
|
|
auto zone = AllocZoneEvent();
|
2019-03-03 16:47:26 +00:00
|
|
|
ProcessZoneBeginAllocSrcLocImpl( zone, ev );
|
2021-11-06 17:55:29 +00:00
|
|
|
auto td = GetCurrentThreadData();
|
|
|
|
auto it = m_nextCallstack.find( td->id );
|
2020-09-29 14:59:28 +00:00
|
|
|
assert( it != m_nextCallstack.end() );
|
|
|
|
auto& extra = RequestZoneExtra( *zone );
|
|
|
|
extra.callstack.SetVal( it->second );
|
|
|
|
it->second = 0;
|
2019-03-03 16:47:26 +00:00
|
|
|
}
|
|
|
|
|
2019-01-15 17:55:21 +00:00
|
|
|
void Worker::ProcessZoneEnd( const QueueZoneEnd& ev )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2021-11-06 17:55:29 +00:00
|
|
|
auto td = GetCurrentThreadData();
|
2020-04-30 17:05:13 +00:00
|
|
|
if( td->zoneIdStack.empty() )
|
|
|
|
{
|
2021-11-06 17:55:29 +00:00
|
|
|
ZoneDoubleEndFailure( td->id, td->timeline.empty() ? nullptr : td->timeline.back() );
|
2020-04-30 17:05:13 +00:00
|
|
|
return;
|
|
|
|
}
|
2019-01-14 22:08:34 +00:00
|
|
|
auto zoneId = td->zoneIdStack.back_and_pop();
|
2019-01-14 22:22:31 +00:00
|
|
|
if( zoneId != td->nextZoneId )
|
|
|
|
{
|
2021-11-06 17:55:29 +00:00
|
|
|
ZoneStackFailure( td->id, td->stack.back() );
|
2019-01-15 17:55:21 +00:00
|
|
|
return;
|
2019-01-14 22:22:31 +00:00
|
|
|
}
|
2019-01-14 22:08:34 +00:00
|
|
|
td->nextZoneId = 0;
|
|
|
|
|
2018-02-13 13:57:47 +00:00
|
|
|
auto& stack = td->stack;
|
|
|
|
assert( !stack.empty() );
|
|
|
|
auto zone = stack.back_and_pop();
|
2019-09-30 23:05:37 +00:00
|
|
|
assert( zone->End() == -1 );
|
2021-06-05 18:13:57 +00:00
|
|
|
const auto isReentry = td->DecStackCount( zone->SrcLoc() );
|
2022-06-21 23:55:10 +00:00
|
|
|
const auto timeEnd = TscTime( RefTime( m_refTimeThread, ev.time ) );
|
2019-10-24 21:23:52 +00:00
|
|
|
zone->SetEnd( timeEnd );
|
|
|
|
assert( timeEnd >= zone->Start() );
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2019-10-24 21:23:52 +00:00
|
|
|
if( m_data.lastTime < timeEnd ) m_data.lastTime = timeEnd;
|
2018-03-18 19:15:45 +00:00
|
|
|
|
2020-01-24 01:17:38 +00:00
|
|
|
if( zone->HasChildren() )
|
2019-03-26 21:06:00 +00:00
|
|
|
{
|
2020-01-24 01:17:38 +00:00
|
|
|
auto& childVec = m_data.zoneChildren[zone->Child()];
|
2019-03-26 21:06:00 +00:00
|
|
|
const auto sz = childVec.size();
|
|
|
|
if( sz <= 8 * 1024 )
|
|
|
|
{
|
2019-11-02 15:17:20 +00:00
|
|
|
Vector<short_ptr<ZoneEvent>> fitVec;
|
2019-11-10 23:04:45 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
fitVec.reserve_exact( sz, m_slab );
|
|
|
|
memcpy( fitVec.data(), childVec.data(), sz * sizeof( short_ptr<ZoneEvent> ) );
|
|
|
|
#else
|
2019-11-10 20:35:41 +00:00
|
|
|
fitVec.set_magic();
|
|
|
|
auto& fv = *((Vector<ZoneEvent>*)&fitVec);
|
|
|
|
fv.reserve_exact( sz, m_slab );
|
|
|
|
auto dst = fv.data();
|
|
|
|
for( auto& ze : childVec )
|
|
|
|
{
|
|
|
|
ZoneEvent* src = ze;
|
|
|
|
memcpy( dst++, src, sizeof( ZoneEvent ) );
|
|
|
|
m_zoneEventPool.push_back( src );
|
|
|
|
}
|
2019-11-10 23:04:45 +00:00
|
|
|
#endif
|
2019-03-26 22:02:39 +00:00
|
|
|
fitVec.swap( childVec );
|
2019-03-26 21:06:00 +00:00
|
|
|
m_data.zoneVectorCache.push_back( std::move( fitVec ) );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-03-18 19:15:45 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
2019-10-24 22:33:44 +00:00
|
|
|
assert( !td->childTimeStack.empty() );
|
|
|
|
const auto timeSpan = timeEnd - zone->Start();
|
2018-03-18 19:15:45 +00:00
|
|
|
if( timeSpan > 0 )
|
|
|
|
{
|
2021-02-07 18:35:53 +00:00
|
|
|
ZoneThreadData ztd;
|
2020-01-23 18:03:03 +00:00
|
|
|
ztd.SetZone( zone );
|
2021-11-06 17:55:29 +00:00
|
|
|
ztd.SetThread( CompressThread( td->id ) );
|
2021-02-07 18:35:53 +00:00
|
|
|
auto slz = GetSourceLocationZones( zone->SrcLoc() );
|
|
|
|
slz->zones.push_back( ztd );
|
2019-10-25 19:29:55 +00:00
|
|
|
if( slz->min > timeSpan ) slz->min = timeSpan;
|
|
|
|
if( slz->max < timeSpan ) slz->max = timeSpan;
|
|
|
|
slz->total += timeSpan;
|
|
|
|
slz->sumSq += double( timeSpan ) * timeSpan;
|
2019-10-24 22:33:44 +00:00
|
|
|
const auto selfSpan = timeSpan - td->childTimeStack.back_and_pop();
|
2019-10-25 19:29:55 +00:00
|
|
|
if( slz->selfMin > selfSpan ) slz->selfMin = selfSpan;
|
|
|
|
if( slz->selfMax < selfSpan ) slz->selfMax = selfSpan;
|
|
|
|
slz->selfTotal += selfSpan;
|
2021-06-23 18:43:46 +00:00
|
|
|
if( !isReentry )
|
2021-06-05 18:28:16 +00:00
|
|
|
{
|
|
|
|
slz->nonReentrantCount++;
|
|
|
|
if( slz->nonReentrantMin > timeSpan ) slz->nonReentrantMin = timeSpan;
|
|
|
|
if( slz->nonReentrantMax < timeSpan ) slz->nonReentrantMax = timeSpan;
|
|
|
|
slz->nonReentrantTotal += timeSpan;
|
|
|
|
}
|
2019-10-24 22:33:44 +00:00
|
|
|
if( !td->childTimeStack.empty() )
|
2018-06-05 22:39:22 +00:00
|
|
|
{
|
2019-10-24 22:33:44 +00:00
|
|
|
td->childTimeStack.back() += timeSpan;
|
2018-06-05 22:39:22 +00:00
|
|
|
}
|
2019-10-24 22:33:44 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
td->childTimeStack.pop_back();
|
2018-03-18 19:15:45 +00:00
|
|
|
}
|
2020-01-23 18:03:03 +00:00
|
|
|
#else
|
|
|
|
CountZoneStatistics( zone );
|
2018-03-18 19:15:45 +00:00
|
|
|
#endif
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2019-01-14 22:22:31 +00:00
|
|
|
void Worker::ZoneStackFailure( uint64_t thread, const ZoneEvent* ev )
|
|
|
|
{
|
|
|
|
m_failure = Failure::ZoneStack;
|
|
|
|
m_failureData.thread = thread;
|
2019-08-15 18:12:09 +00:00
|
|
|
m_failureData.srcloc = ev->SrcLoc();
|
2019-01-14 22:22:31 +00:00
|
|
|
}
|
|
|
|
|
2020-04-30 17:05:13 +00:00
|
|
|
void Worker::ZoneDoubleEndFailure( uint64_t thread, const ZoneEvent* ev )
|
|
|
|
{
|
|
|
|
m_failure = Failure::ZoneDoubleEnd;
|
|
|
|
m_failureData.thread = thread;
|
|
|
|
m_failureData.srcloc = ev ? ev->SrcLoc() : 0;
|
|
|
|
}
|
|
|
|
|
2021-10-10 12:14:33 +00:00
|
|
|
void Worker::ZoneTextFailure( uint64_t thread, const char* text )
|
2019-01-16 00:17:01 +00:00
|
|
|
{
|
|
|
|
m_failure = Failure::ZoneText;
|
|
|
|
m_failureData.thread = thread;
|
2021-10-10 12:14:33 +00:00
|
|
|
m_failureData.message = text;
|
2019-01-16 00:17:01 +00:00
|
|
|
}
|
|
|
|
|
2021-10-10 12:12:13 +00:00
|
|
|
void Worker::ZoneValueFailure( uint64_t thread, uint64_t value )
|
2021-10-10 12:05:21 +00:00
|
|
|
{
|
2021-10-10 12:12:13 +00:00
|
|
|
char buf[128];
|
|
|
|
if( (int64_t)value < 0 )
|
|
|
|
{
|
|
|
|
sprintf( buf, "Zone value was: %" PRIu64 " (unsigned), %" PRIi64 " (signed)", value, (int64_t)value );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
sprintf( buf, "Zone value was: %" PRIu64, value );
|
|
|
|
}
|
|
|
|
|
2021-10-10 12:05:21 +00:00
|
|
|
m_failure = Failure::ZoneValue;
|
|
|
|
m_failureData.thread = thread;
|
2021-10-10 12:12:13 +00:00
|
|
|
m_failureData.message = buf;
|
2021-10-10 12:05:21 +00:00
|
|
|
}
|
|
|
|
|
2020-11-27 11:37:35 +00:00
|
|
|
void Worker::ZoneColorFailure( uint64_t thread )
|
|
|
|
{
|
|
|
|
m_failure = Failure::ZoneColor;
|
|
|
|
m_failureData.thread = thread;
|
|
|
|
}
|
|
|
|
|
2019-01-16 00:17:01 +00:00
|
|
|
void Worker::ZoneNameFailure( uint64_t thread )
|
|
|
|
{
|
|
|
|
m_failure = Failure::ZoneName;
|
|
|
|
m_failureData.thread = thread;
|
|
|
|
}
|
|
|
|
|
2019-01-15 17:56:26 +00:00
|
|
|
void Worker::MemFreeFailure( uint64_t thread )
|
|
|
|
{
|
|
|
|
m_failure = Failure::MemFree;
|
|
|
|
m_failureData.thread = thread;
|
2021-01-15 19:49:39 +00:00
|
|
|
m_failureData.callstack = m_serialNextCallstack;
|
2019-01-15 17:56:26 +00:00
|
|
|
}
|
|
|
|
|
2021-03-09 21:10:29 +00:00
|
|
|
void Worker::MemAllocTwiceFailure( uint64_t thread )
|
|
|
|
{
|
|
|
|
m_failure = Failure::MemAllocTwice;
|
|
|
|
m_failureData.thread = thread;
|
|
|
|
m_failureData.callstack = m_serialNextCallstack;
|
|
|
|
}
|
|
|
|
|
2019-02-28 18:32:42 +00:00
|
|
|
void Worker::FrameEndFailure()
|
|
|
|
{
|
2019-11-10 16:17:07 +00:00
|
|
|
m_failure = Failure::FrameEnd;
|
2019-02-28 18:32:42 +00:00
|
|
|
}
|
|
|
|
|
2019-06-09 11:44:53 +00:00
|
|
|
void Worker::FrameImageIndexFailure()
|
|
|
|
{
|
|
|
|
m_failure = Failure::FrameImageIndex;
|
|
|
|
}
|
|
|
|
|
|
|
|
void Worker::FrameImageTwiceFailure()
|
|
|
|
{
|
|
|
|
m_failure = Failure::FrameImageTwice;
|
|
|
|
}
|
|
|
|
|
2021-11-02 00:47:31 +00:00
|
|
|
void Worker::FiberLeaveFailure()
|
|
|
|
{
|
|
|
|
m_failure = Failure::FiberLeave;
|
|
|
|
}
|
|
|
|
|
2019-01-14 21:56:10 +00:00
|
|
|
void Worker::ProcessZoneValidation( const QueueZoneValidation& ev )
|
|
|
|
{
|
2021-11-06 17:55:29 +00:00
|
|
|
auto td = GetCurrentThreadData();
|
2019-01-14 21:56:10 +00:00
|
|
|
td->nextZoneId = ev.id;
|
|
|
|
}
|
|
|
|
|
2018-02-13 13:57:47 +00:00
|
|
|
void Worker::ProcessFrameMark( const QueueFrameMark& ev )
|
|
|
|
{
|
2018-08-04 17:47:09 +00:00
|
|
|
auto fd = m_data.frames.Retrieve( ev.name, [this] ( uint64_t name ) {
|
|
|
|
auto fd = m_slab.AllocInit<FrameData>();
|
|
|
|
fd->name = name;
|
2018-08-05 00:09:59 +00:00
|
|
|
fd->continuous = 1;
|
2018-08-04 17:47:09 +00:00
|
|
|
return fd;
|
|
|
|
}, [this] ( uint64_t name ) {
|
2019-04-01 16:52:32 +00:00
|
|
|
Query( ServerQueryFrameName, name );
|
2018-08-04 17:47:09 +00:00
|
|
|
} );
|
|
|
|
|
2019-06-27 11:04:27 +00:00
|
|
|
int32_t frameImage = -1;
|
2020-06-29 22:53:26 +00:00
|
|
|
if( ev.name == 0 )
|
2019-06-27 11:04:27 +00:00
|
|
|
{
|
2020-06-29 22:53:26 +00:00
|
|
|
auto fis = m_frameImageStaging.find( fd->frames.size() );
|
|
|
|
if( fis != m_frameImageStaging.end() )
|
|
|
|
{
|
|
|
|
frameImage = fis->second;
|
|
|
|
m_frameImageStaging.erase( fis );
|
|
|
|
}
|
2019-06-27 11:04:27 +00:00
|
|
|
}
|
|
|
|
|
2018-08-05 00:09:59 +00:00
|
|
|
assert( fd->continuous == 1 );
|
2022-06-21 23:30:01 +00:00
|
|
|
const auto time = TscTime( ev.time );
|
2019-03-21 20:24:07 +00:00
|
|
|
assert( fd->frames.empty() || fd->frames.back().start <= time );
|
2019-06-27 11:04:27 +00:00
|
|
|
fd->frames.push_back( FrameEvent{ time, -1, frameImage } );
|
2019-10-24 21:23:52 +00:00
|
|
|
if( m_data.lastTime < time ) m_data.lastTime = time;
|
2019-09-16 19:31:43 +00:00
|
|
|
|
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
const auto timeSpan = GetFrameTime( *fd, fd->frames.size() - 1 );
|
|
|
|
if( timeSpan > 0 )
|
|
|
|
{
|
|
|
|
fd->min = std::min( fd->min, timeSpan );
|
|
|
|
fd->max = std::max( fd->max, timeSpan );
|
|
|
|
fd->total += timeSpan;
|
|
|
|
fd->sumSq += double( timeSpan ) * timeSpan;
|
|
|
|
}
|
|
|
|
#endif
|
2018-08-05 00:09:59 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void Worker::ProcessFrameMarkStart( const QueueFrameMark& ev )
|
|
|
|
{
|
|
|
|
auto fd = m_data.frames.Retrieve( ev.name, [this] ( uint64_t name ) {
|
|
|
|
auto fd = m_slab.AllocInit<FrameData>();
|
|
|
|
fd->name = name;
|
|
|
|
fd->continuous = 0;
|
|
|
|
return fd;
|
|
|
|
}, [this] ( uint64_t name ) {
|
2019-04-01 16:52:32 +00:00
|
|
|
Query( ServerQueryFrameName, name );
|
2018-08-05 00:09:59 +00:00
|
|
|
} );
|
|
|
|
|
|
|
|
assert( fd->continuous == 0 );
|
2022-06-21 23:30:01 +00:00
|
|
|
const auto time = TscTime( ev.time );
|
2019-03-21 20:24:07 +00:00
|
|
|
assert( fd->frames.empty() || ( fd->frames.back().end <= time && fd->frames.back().end != -1 ) );
|
2019-06-06 19:44:48 +00:00
|
|
|
fd->frames.push_back( FrameEvent{ time, -1, -1 } );
|
2019-10-24 21:23:52 +00:00
|
|
|
if( m_data.lastTime < time ) m_data.lastTime = time;
|
2018-08-05 00:09:59 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void Worker::ProcessFrameMarkEnd( const QueueFrameMark& ev )
|
|
|
|
{
|
|
|
|
auto fd = m_data.frames.Retrieve( ev.name, [this] ( uint64_t name ) {
|
|
|
|
auto fd = m_slab.AllocInit<FrameData>();
|
|
|
|
fd->name = name;
|
|
|
|
fd->continuous = 0;
|
|
|
|
return fd;
|
|
|
|
}, [this] ( uint64_t name ) {
|
2019-04-01 16:52:32 +00:00
|
|
|
Query( ServerQueryFrameName, name );
|
2018-08-05 00:09:59 +00:00
|
|
|
} );
|
|
|
|
|
|
|
|
assert( fd->continuous == 0 );
|
2022-06-21 23:30:01 +00:00
|
|
|
const auto time = TscTime( ev.time );
|
2018-08-05 00:09:59 +00:00
|
|
|
if( fd->frames.empty() )
|
|
|
|
{
|
2019-02-28 18:32:42 +00:00
|
|
|
FrameEndFailure();
|
2018-08-05 00:09:59 +00:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
assert( fd->frames.back().end == -1 );
|
|
|
|
fd->frames.back().end = time;
|
2019-10-24 21:23:52 +00:00
|
|
|
if( m_data.lastTime < time ) m_data.lastTime = time;
|
2019-09-16 19:31:43 +00:00
|
|
|
|
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
const auto timeSpan = GetFrameTime( *fd, fd->frames.size() - 1 );
|
|
|
|
if( timeSpan > 0 )
|
|
|
|
{
|
|
|
|
fd->min = std::min( fd->min, timeSpan );
|
|
|
|
fd->max = std::max( fd->max, timeSpan );
|
|
|
|
fd->total += timeSpan;
|
|
|
|
fd->sumSq += double( timeSpan ) * timeSpan;
|
|
|
|
}
|
|
|
|
#endif
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2020-07-26 12:18:48 +00:00
|
|
|
void Worker::ProcessFrameImage( const QueueFrameImage& ev )
|
2019-06-06 19:39:54 +00:00
|
|
|
{
|
2020-05-10 18:16:08 +00:00
|
|
|
assert( m_pendingFrameImageData.image != nullptr );
|
2019-06-06 19:39:54 +00:00
|
|
|
|
2019-06-09 11:44:53 +00:00
|
|
|
auto& frames = m_data.framesBase->frames;
|
2019-08-26 17:09:12 +00:00
|
|
|
const auto fidx = int64_t( ev.frame ) - int64_t( m_data.frameOffset ) + 1;
|
2019-06-09 13:37:49 +00:00
|
|
|
if( m_onDemand && fidx <= 1 )
|
|
|
|
{
|
2020-05-10 18:16:08 +00:00
|
|
|
m_pendingFrameImageData.image = nullptr;
|
2019-06-09 13:37:49 +00:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
else if( fidx <= 0 )
|
2019-06-09 11:44:53 +00:00
|
|
|
{
|
|
|
|
FrameImageIndexFailure();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2019-06-06 19:39:54 +00:00
|
|
|
auto fi = m_slab.Alloc<FrameImage>();
|
2020-05-10 18:16:08 +00:00
|
|
|
fi->ptr = m_pendingFrameImageData.image;
|
|
|
|
fi->csz = m_pendingFrameImageData.csz;
|
2019-06-06 19:39:54 +00:00
|
|
|
fi->w = ev.w;
|
|
|
|
fi->h = ev.h;
|
2019-08-26 17:09:12 +00:00
|
|
|
fi->frameRef = uint32_t( fidx );
|
2019-06-12 13:28:32 +00:00
|
|
|
fi->flip = ev.flip;
|
2019-06-06 19:39:54 +00:00
|
|
|
|
|
|
|
const auto idx = m_data.frameImage.size();
|
2019-06-06 20:15:30 +00:00
|
|
|
m_data.frameImage.push_back( fi );
|
2020-05-10 18:16:08 +00:00
|
|
|
m_pendingFrameImageData.image = nullptr;
|
2019-06-27 11:04:27 +00:00
|
|
|
|
2020-03-01 00:48:20 +00:00
|
|
|
if( fidx >= (int64_t)frames.size() )
|
2019-06-27 11:04:27 +00:00
|
|
|
{
|
|
|
|
if( m_frameImageStaging.find( fidx ) != m_frameImageStaging.end() )
|
|
|
|
{
|
|
|
|
FrameImageTwiceFailure();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
m_frameImageStaging.emplace( fidx, idx );
|
|
|
|
}
|
|
|
|
else if( frames[fidx].frameImage >= 0 )
|
|
|
|
{
|
|
|
|
FrameImageTwiceFailure();
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
frames[fidx].frameImage = idx;
|
|
|
|
}
|
2019-06-06 19:39:54 +00:00
|
|
|
}
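// Frame images are tied to frames by index. If the image for a frame arrives before the
// frame mark itself (fidx beyond the frames recorded so far), it is parked in
// m_frameImageStaging and picked up later by ProcessFrameMark(); duplicate images for a
// single frame and out-of-range indices are reported as failures rather than being
// silently dropped.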
|
|
|
|
|
2020-07-25 22:53:55 +00:00
|
|
|
void Worker::ProcessZoneText()
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2019-10-24 20:34:18 +00:00
|
|
|
auto td = RetrieveThread( m_threadCtx );
|
2021-11-06 17:55:29 +00:00
|
|
|
if( !td )
|
2019-01-16 00:17:01 +00:00
|
|
|
{
|
2022-03-10 12:40:18 +00:00
|
|
|
ZoneTextFailure( m_threadCtx, m_pendingSingleString.ptr );
|
2021-11-06 17:55:29 +00:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
if( td->fiber ) td = td->fiber;
|
|
|
|
if( td->stack.empty() || td->nextZoneId != td->zoneIdStack.back() )
|
|
|
|
{
|
|
|
|
ZoneTextFailure( td->id, m_pendingSingleString.ptr );
|
2019-01-16 00:17:01 +00:00
|
|
|
return;
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2020-07-25 22:53:55 +00:00
|
|
|
const auto ptr = m_pendingSingleString.ptr;
|
|
|
|
const auto idx = GetSingleStringIdx();
|
|
|
|
|
2019-01-16 00:17:01 +00:00
|
|
|
td->nextZoneId = 0;
|
2018-02-13 13:57:47 +00:00
|
|
|
auto& stack = td->stack;
|
|
|
|
auto zone = stack.back();
|
2020-02-20 22:37:55 +00:00
|
|
|
auto& extra = RequestZoneExtra( *zone );
|
2020-01-26 14:57:55 +00:00
|
|
|
if( !extra.text.Active() )
|
2019-12-18 12:33:01 +00:00
|
|
|
{
|
2020-07-25 22:53:55 +00:00
|
|
|
extra.text = StringIdx( idx );
|
2019-12-18 12:33:01 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2020-01-26 14:57:55 +00:00
|
|
|
const auto str0 = GetString( extra.text );
|
2020-07-25 22:53:55 +00:00
|
|
|
const auto str1 = ptr;
|
2019-12-18 12:33:01 +00:00
|
|
|
const auto len0 = strlen( str0 );
|
|
|
|
const auto len1 = strlen( str1 );
|
2020-05-24 14:17:54 +00:00
|
|
|
const auto bsz = len0+len1+1;
|
|
|
|
if( m_tmpBufSize < bsz )
|
|
|
|
{
|
|
|
|
delete[] m_tmpBuf;
|
|
|
|
m_tmpBuf = new char[bsz];
|
|
|
|
m_tmpBufSize = bsz;
|
|
|
|
}
|
|
|
|
char* buf = m_tmpBuf;
|
2019-12-18 12:33:01 +00:00
|
|
|
memcpy( buf, str0, len0 );
|
|
|
|
buf[len0] = '\n';
|
|
|
|
memcpy( buf+len0+1, str1, len1 );
|
2020-05-24 14:17:54 +00:00
|
|
|
extra.text = StringIdx( StoreString( buf, bsz ).idx );
|
2019-12-18 12:33:01 +00:00
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2020-07-25 22:53:55 +00:00
|
|
|
void Worker::ProcessZoneName()
|
2018-06-29 14:12:17 +00:00
|
|
|
{
|
2019-10-24 20:34:18 +00:00
|
|
|
auto td = RetrieveThread( m_threadCtx );
|
2021-11-06 17:55:29 +00:00
|
|
|
if( !td )
|
2019-01-16 00:17:01 +00:00
|
|
|
{
|
2022-03-10 12:40:18 +00:00
|
|
|
ZoneNameFailure( m_threadCtx );
|
2021-11-06 17:55:29 +00:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
if( td->fiber ) td = td->fiber;
|
|
|
|
if( td->stack.empty() || td->nextZoneId != td->zoneIdStack.back() )
|
|
|
|
{
|
|
|
|
ZoneNameFailure( td->id );
|
2019-01-16 00:17:01 +00:00
|
|
|
return;
|
|
|
|
}
|
2018-06-29 14:12:17 +00:00
|
|
|
|
2019-01-16 00:17:01 +00:00
|
|
|
td->nextZoneId = 0;
|
2018-06-29 14:12:17 +00:00
|
|
|
auto& stack = td->stack;
|
|
|
|
auto zone = stack.back();
|
2020-02-20 22:37:55 +00:00
|
|
|
auto& extra = RequestZoneExtra( *zone );
|
2020-07-25 22:53:55 +00:00
|
|
|
extra.name = StringIdx( GetSingleStringIdx() );
|
2018-06-29 14:12:17 +00:00
|
|
|
}

void Worker::ProcessZoneColor( const QueueZoneColor& ev )
{
    auto td = RetrieveThread( m_threadCtx );
    if( !td )
    {
        ZoneColorFailure( m_threadCtx );
        return;
    }
    if( td->fiber ) td = td->fiber;
    if( td->stack.empty() || td->nextZoneId != td->zoneIdStack.back() )
    {
        ZoneColorFailure( td->id );
        return;
    }

    td->nextZoneId = 0;
    auto& stack = td->stack;
    auto zone = stack.back();
    auto& extra = RequestZoneExtra( *zone );
    const uint32_t color = ( ev.r << 16 ) | ( ev.g << 8 ) | ev.b;
    extra.color = color;
}
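
// Illustrative only: ProcessZoneColor above packs the three 8-bit channels from the
// network event into a single 0x00RRGGBB value. A standalone equivalent, with a
// hypothetical sample value in the comment:
static uint32_t ExamplePackZoneColor( uint8_t r, uint8_t g, uint8_t b )
{
    // e.g. r=0x12, g=0x34, b=0x56  ->  0x123456
    return ( uint32_t( r ) << 16 ) | ( uint32_t( g ) << 8 ) | b;
}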

void Worker::ProcessZoneValue( const QueueZoneValue& ev )
{
    char tmp[32];
    const auto tsz = sprintf( tmp, "%" PRIu64, ev.value );

    auto td = RetrieveThread( m_threadCtx );
    if( !td )
    {
        ZoneValueFailure( m_threadCtx, ev.value );
        return;
    }
    if( td->fiber ) td = td->fiber;
    if( td->stack.empty() || td->nextZoneId != td->zoneIdStack.back() )
    {
        ZoneValueFailure( td->id, ev.value );
        return;
    }

    td->nextZoneId = 0;
    auto& stack = td->stack;
    auto zone = stack.back();
    auto& extra = RequestZoneExtra( *zone );
    if( !extra.text.Active() )
    {
        extra.text = StringIdx( StoreString( tmp, tsz ).idx );
    }
    else
    {
        const auto str0 = GetString( extra.text );
        const auto len0 = strlen( str0 );
        const auto bsz = len0+tsz+1;
        if( m_tmpBufSize < bsz )
        {
            delete[] m_tmpBuf;
            m_tmpBuf = new char[bsz];
            m_tmpBufSize = bsz;
        }
        char* buf = m_tmpBuf;
        memcpy( buf, str0, len0 );
        buf[len0] = '\n';
        memcpy( buf+len0+1, tmp, tsz );
        extra.text = StringIdx( StoreString( buf, bsz ).idx );
    }
}
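
// Illustrative only: ProcessZoneValue renders the 64-bit payload as decimal text with
// the PRIu64 macro (available via <inttypes.h>) before storing or appending it, exactly
// as above. A minimal standalone equivalent with a hypothetical name:
static int ExampleFormatZoneValue( char ( &buf )[32], uint64_t value )
{
    // Returns the number of characters written, e.g. 18446744073709551615 -> 20.
    return sprintf( buf, "%" PRIu64, value );
}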

void Worker::ProcessLockAnnounce( const QueueLockAnnounce& ev )
{
    auto it = m_data.lockMap.find( ev.id );
    assert( it == m_data.lockMap.end() );
    auto lm = m_slab.AllocInit<LockMap>();
    lm->srcloc = ShrinkSourceLocation( ev.lckloc );
    lm->type = ev.type;
    lm->timeAnnounce = TscTime( ev.time );
    lm->timeTerminate = 0;
    lm->valid = true;
    lm->isContended = false;
    lm->lockingThread = 0;
    m_data.lockMap.emplace( ev.id, lm );
    CheckSourceLocation( ev.lckloc );
}

void Worker::ProcessLockTerminate( const QueueLockTerminate& ev )
{
    auto it = m_data.lockMap.find( ev.id );
    assert( it != m_data.lockMap.end() );
    it->second->timeTerminate = TscTime( ev.time );
}

void Worker::ProcessLockWait( const QueueLockWait& ev )
{
    auto it = m_data.lockMap.find( ev.id );
    assert( it != m_data.lockMap.end() );
    auto& lock = *it->second;

    auto lev = lock.type == LockType::Lockable ? m_slab.Alloc<LockEvent>() : m_slab.Alloc<LockEventShared>();
    const auto time = TscTime( RefTime( m_refTimeSerial, ev.time ) );
    lev->SetTime( time );
    lev->SetSrcLoc( 0 );
    lev->type = LockEvent::Type::Wait;

    InsertLockEvent( lock, lev, ev.thread, time );
}

void Worker::ProcessLockObtain( const QueueLockObtain& ev )
{
    auto it = m_data.lockMap.find( ev.id );
    assert( it != m_data.lockMap.end() );
    auto& lock = *it->second;

    auto lev = lock.type == LockType::Lockable ? m_slab.Alloc<LockEvent>() : m_slab.Alloc<LockEventShared>();
    const auto time = TscTime( RefTime( m_refTimeSerial, ev.time ) );
    lev->SetTime( time );
    lev->SetSrcLoc( 0 );
    lev->type = LockEvent::Type::Obtain;

    InsertLockEvent( lock, lev, ev.thread, time );
    lock.lockingThread = ev.thread;
}

void Worker::ProcessLockRelease( const QueueLockRelease& ev )
{
    auto it = m_data.lockMap.find( ev.id );
    assert( it != m_data.lockMap.end() );
    auto& lock = *it->second;

    auto lev = lock.type == LockType::Lockable ? m_slab.Alloc<LockEvent>() : m_slab.Alloc<LockEventShared>();
    const auto time = TscTime( RefTime( m_refTimeSerial, ev.time ) );
    lev->SetTime( time );
    lev->SetSrcLoc( 0 );
    lev->type = LockEvent::Type::Release;

    InsertLockEvent( lock, lev, lock.lockingThread, time );
}
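
// Illustrative sketch only: the obtain/release pair above relies on LockMap remembering
// which thread currently holds the lock, because the release event itself does not
// carry a thread id. A minimal model of that bookkeeping (type and names hypothetical):
struct ExampleLockOwner
{
    uint64_t lockingThread = 0;
    void Obtain( uint64_t thread ) { lockingThread = thread; }      // as in ProcessLockObtain
    uint64_t Release() const { return lockingThread; }              // release is attributed to this thread
};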

void Worker::ProcessLockSharedWait( const QueueLockWait& ev )
{
    auto it = m_data.lockMap.find( ev.id );
    assert( it != m_data.lockMap.end() );
    auto& lock = *it->second;

    assert( lock.type == LockType::SharedLockable );
    auto lev = m_slab.Alloc<LockEventShared>();
    const auto time = TscTime( RefTime( m_refTimeSerial, ev.time ) );
    lev->SetTime( time );
    lev->SetSrcLoc( 0 );
    lev->type = LockEvent::Type::WaitShared;

    InsertLockEvent( lock, lev, ev.thread, time );
}

void Worker::ProcessLockSharedObtain( const QueueLockObtain& ev )
{
    auto it = m_data.lockMap.find( ev.id );
    assert( it != m_data.lockMap.end() );
    auto& lock = *it->second;

    assert( lock.type == LockType::SharedLockable );
    auto lev = m_slab.Alloc<LockEventShared>();
    const auto time = TscTime( RefTime( m_refTimeSerial, ev.time ) );
    lev->SetTime( time );
    lev->SetSrcLoc( 0 );
    lev->type = LockEvent::Type::ObtainShared;

    InsertLockEvent( lock, lev, ev.thread, time );
}

void Worker::ProcessLockSharedRelease( const QueueLockReleaseShared& ev )
{
    auto it = m_data.lockMap.find( ev.id );
    assert( it != m_data.lockMap.end() );
    auto& lock = *it->second;

    assert( lock.type == LockType::SharedLockable );
    auto lev = m_slab.Alloc<LockEventShared>();
    const auto time = TscTime( RefTime( m_refTimeSerial, ev.time ) );
    lev->SetTime( time );
    lev->SetSrcLoc( 0 );
    lev->type = LockEvent::Type::ReleaseShared;

    InsertLockEvent( lock, lev, ev.thread, time );
}

void Worker::ProcessLockMark( const QueueLockMark& ev )
{
    CheckSourceLocation( ev.srcloc );
    auto lit = m_data.lockMap.find( ev.id );
    assert( lit != m_data.lockMap.end() );
    auto& lockmap = *lit->second;
    auto tid = lockmap.threadMap.find( ev.thread );
    assert( tid != lockmap.threadMap.end() );
    const auto thread = tid->second;
    auto it = lockmap.timeline.end();
    for(;;)
    {
        --it;
        if( it->ptr->thread == thread )
        {
            switch( it->ptr->type )
            {
            case LockEvent::Type::Obtain:
            case LockEvent::Type::ObtainShared:
            case LockEvent::Type::Wait:
            case LockEvent::Type::WaitShared:
                it->ptr->SetSrcLoc( ShrinkSourceLocation( ev.srcloc ) );
                return;
            default:
                break;
            }
        }
    }
}

void Worker::ProcessLockName( const QueueLockName& ev )
{
    auto lit = m_data.lockMap.find( ev.id );
    assert( lit != m_data.lockMap.end() );
    lit->second->customName = StringIdx( GetSingleStringIdx() );
}

void Worker::ProcessPlotData( const QueuePlotData& ev )
{
    switch( ev.type )
    {
    case PlotDataType::Double:
        if( !isfinite( ev.data.d ) ) return;
        break;
    case PlotDataType::Float:
        if( !isfinite( ev.data.f ) ) return;
        break;
    default:
        break;
    }

    PlotData* plot = m_data.plots.Retrieve( ev.name, [this] ( uint64_t name ) {
        auto plot = m_slab.AllocInit<PlotData>();
        plot->name = name;
        plot->type = PlotType::User;
        plot->format = PlotValueFormatting::Number;
        return plot;
    }, [this]( uint64_t name ) {
        Query( ServerQueryPlotName, name );
    } );

    const auto time = TscTime( RefTime( m_refTimeThread, ev.time ) );
    if( m_data.lastTime < time ) m_data.lastTime = time;
    switch( ev.type )
    {
    case PlotDataType::Double:
        InsertPlot( plot, time, ev.data.d );
        break;
    case PlotDataType::Float:
        InsertPlot( plot, time, (double)ev.data.f );
        break;
    case PlotDataType::Int:
        InsertPlot( plot, time, (double)ev.data.i );
        break;
    default:
        assert( false );
        break;
    }
}
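
// Illustrative only: ProcessPlotData above rejects non-finite samples before they reach
// the plot, presumably to keep later range tracking and interpolation sane. A standalone
// predicate showing the same check (isfinite comes from <math.h>, already included here):
static bool ExamplePlotValueUsable( double value )
{
    return isfinite( value ) != 0;      // false for NaN, +Inf and -Inf
}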

void Worker::ProcessPlotConfig( const QueuePlotConfig& ev )
{
    PlotData* plot = m_data.plots.Retrieve( ev.name, [this] ( uint64_t name ) {
        auto plot = m_slab.AllocInit<PlotData>();
        plot->name = name;
        plot->type = PlotType::User;
        return plot;
    }, [this]( uint64_t name ) {
        Query( ServerQueryPlotName, name );
    } );

    plot->format = (PlotValueFormatting)ev.type;
}

void Worker::ProcessMessage( const QueueMessage& ev )
{
    auto td = GetCurrentThreadData();
    auto msg = m_slab.Alloc<MessageData>();
    const auto time = TscTime( ev.time );
    msg->time = time;
    msg->ref = StringRef( StringRef::Type::Idx, GetSingleStringIdx() );
    msg->thread = CompressThread( td->id );
    msg->color = 0xFFFFFFFF;
    msg->callstack.SetVal( 0 );
    if( m_data.lastTime < time ) m_data.lastTime = time;
    InsertMessageData( msg );
}

void Worker::ProcessMessageLiteral( const QueueMessageLiteral& ev )
{
    auto td = GetCurrentThreadData();
    CheckString( ev.text );
    auto msg = m_slab.Alloc<MessageData>();
    const auto time = TscTime( ev.time );
    msg->time = time;
    msg->ref = StringRef( StringRef::Type::Ptr, ev.text );
    msg->thread = CompressThread( td->id );
    msg->color = 0xFFFFFFFF;
    msg->callstack.SetVal( 0 );
    if( m_data.lastTime < time ) m_data.lastTime = time;
    InsertMessageData( msg );
}

void Worker::ProcessMessageColor( const QueueMessageColor& ev )
{
    auto td = GetCurrentThreadData();
    auto msg = m_slab.Alloc<MessageData>();
    const auto time = TscTime( ev.time );
    msg->time = time;
    msg->ref = StringRef( StringRef::Type::Idx, GetSingleStringIdx() );
    msg->thread = CompressThread( td->id );
    msg->color = 0xFF000000 | ( ev.r << 16 ) | ( ev.g << 8 ) | ev.b;
    msg->callstack.SetVal( 0 );
    if( m_data.lastTime < time ) m_data.lastTime = time;
    InsertMessageData( msg );
}

void Worker::ProcessMessageLiteralColor( const QueueMessageColorLiteral& ev )
{
    auto td = GetCurrentThreadData();
    CheckString( ev.text );
    auto msg = m_slab.Alloc<MessageData>();
    const auto time = TscTime( ev.time );
    msg->time = time;
    msg->ref = StringRef( StringRef::Type::Ptr, ev.text );
    msg->thread = CompressThread( td->id );
    msg->color = 0xFF000000 | ( ev.r << 16 ) | ( ev.g << 8 ) | ev.b;
    msg->callstack.SetVal( 0 );
    if( m_data.lastTime < time ) m_data.lastTime = time;
    InsertMessageData( msg );
}
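
// Illustrative only: unlike zone colors, message colors above are stored with a forced
// opaque alpha channel, i.e. as 0xFFRRGGBB. Standalone equivalent of that packing,
// with a hypothetical name:
static uint32_t ExamplePackMessageColor( uint8_t r, uint8_t g, uint8_t b )
{
    return 0xFF000000 | ( uint32_t( r ) << 16 ) | ( uint32_t( g ) << 8 ) | b;
}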

void Worker::ProcessMessageCallstack( const QueueMessage& ev )
{
    auto td = GetCurrentThreadData();
    ProcessMessage( ev );
    auto it = m_nextCallstack.find( td->id );
    assert( it != m_nextCallstack.end() );
    td->messages.back()->callstack.SetVal( it->second );
    it->second = 0;
}

void Worker::ProcessMessageLiteralCallstack( const QueueMessageLiteral& ev )
{
    auto td = GetCurrentThreadData();
    ProcessMessageLiteral( ev );
    auto it = m_nextCallstack.find( td->id );
    assert( it != m_nextCallstack.end() );
    td->messages.back()->callstack.SetVal( it->second );
    it->second = 0;
}

void Worker::ProcessMessageColorCallstack( const QueueMessageColor& ev )
{
    auto td = GetCurrentThreadData();
    ProcessMessageColor( ev );
    auto it = m_nextCallstack.find( td->id );
    assert( it != m_nextCallstack.end() );
    td->messages.back()->callstack.SetVal( it->second );
    it->second = 0;
}

void Worker::ProcessMessageLiteralColorCallstack( const QueueMessageColorLiteral& ev )
{
    auto td = GetCurrentThreadData();
    ProcessMessageLiteralColor( ev );
    auto it = m_nextCallstack.find( td->id );
    assert( it != m_nextCallstack.end() );
    td->messages.back()->callstack.SetVal( it->second );
    it->second = 0;
}

void Worker::ProcessMessageAppInfo( const QueueMessage& ev )
{
    m_data.appInfo.push_back( StringRef( StringRef::Type::Idx, GetSingleStringIdx() ) );
    const auto time = TscTime( ev.time );
    if( m_data.lastTime < time ) m_data.lastTime = time;
}

void Worker::ProcessGpuNewContext( const QueueGpuNewContext& ev )
{
    assert( !m_gpuCtxMap[ev.context] );
    assert( ev.type != GpuContextType::Invalid );

    int64_t gpuTime;
    if( ev.period == 1.f )
    {
        gpuTime = ev.gpuTime;
    }
    else
    {
        gpuTime = int64_t( double( ev.period ) * ev.gpuTime );      // precision loss
    }

    const auto cpuTime = TscTime( ev.cpuTime );
    auto gpu = m_slab.AllocInit<GpuCtxData>();
    memset( (char*)gpu->query, 0, sizeof( gpu->query ) );
    gpu->timeDiff = cpuTime - gpuTime;
    gpu->thread = ev.thread;
    gpu->period = ev.period;
    gpu->count = 0;
    gpu->type = ev.type;
    gpu->hasPeriod = ev.period != 1.f;
    gpu->hasCalibration = ev.flags & GpuContextCalibration;
    gpu->calibratedGpuTime = gpuTime;
    gpu->calibratedCpuTime = cpuTime;
    gpu->calibrationMod = 1.;
    gpu->lastGpuTime = 0;
    gpu->overflow = 0;
    gpu->overflowMul = 0;
    m_data.gpuData.push_back( gpu );
    m_gpuCtxMap[ev.context] = gpu;
}
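
// Illustrative only: the context setup above converts the initial GPU timestamp via the
// reported tick period and remembers the CPU/GPU offset, so later timestamps can be
// mapped onto the CPU timeline when no calibration is available. A standalone sketch of
// that mapping; the name and parameters are hypothetical:
static int64_t ExampleGpuToCpuTime( int64_t gpuTimestamp, float period, int64_t timeDiff )
{
    const auto gpuTime = period == 1.f ? gpuTimestamp : int64_t( double( period ) * gpuTimestamp );     // precision loss
    return gpuTime + timeDiff;      // timeDiff = cpuTime - gpuTime captured at context creation
}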

void Worker::ProcessGpuZoneBeginImpl( GpuEvent* zone, const QueueGpuZoneBegin& ev, bool serial )
{
    CheckSourceLocation( ev.srcloc );
    zone->SetSrcLoc( ShrinkSourceLocation( ev.srcloc ) );
    ProcessGpuZoneBeginImplCommon( zone, ev, serial );
}

void Worker::ProcessGpuZoneBeginAllocSrcLocImpl( GpuEvent* zone, const QueueGpuZoneBeginLean& ev, bool serial )
{
    assert( m_pendingSourceLocationPayload != 0 );
    zone->SetSrcLoc( m_pendingSourceLocationPayload );
    ProcessGpuZoneBeginImplCommon( zone, ev, serial );
    m_pendingSourceLocationPayload = 0;
}

void Worker::ProcessGpuZoneBeginImplCommon( GpuEvent* zone, const QueueGpuZoneBeginLean& ev, bool serial )
{
    m_data.gpuCnt++;

    auto ctx = m_gpuCtxMap[ev.context].get();
    assert( ctx );

    int64_t cpuTime;
    if( serial )
    {
        cpuTime = RefTime( m_refTimeSerial, ev.cpuTime );
    }
    else
    {
        cpuTime = RefTime( m_refTimeThread, ev.cpuTime );
    }
    const auto time = TscTime( cpuTime );
    zone->SetCpuStart( time );
    zone->SetCpuEnd( -1 );
    zone->SetGpuStart( -1 );
    zone->SetGpuEnd( -1 );
    zone->callstack.SetVal( 0 );
    zone->SetChild( -1 );

    uint64_t ztid;
    if( ctx->thread == 0 )
    {
        // Vulkan, OpenCL and Direct3D 12 contexts are not bound to any single thread.
        zone->SetThread( CompressThread( ev.thread ) );
        ztid = ev.thread;
    }
    else
    {
        // OpenGL and Direct3D 11 don't need a per-zone thread id. It can still be sent,
        // because it may be needed for callstack collection purposes.
        zone->SetThread( 0 );
        ztid = 0;
    }

    if( m_data.lastTime < time ) m_data.lastTime = time;

    auto td = ctx->threadData.find( ztid );
    if( td == ctx->threadData.end() )
    {
        td = ctx->threadData.emplace( ztid, GpuCtxThreadData {} ).first;
    }
    auto timeline = &td->second.timeline;
    auto& stack = td->second.stack;
    if( !stack.empty() )
    {
        auto back = stack.back();
        if( back->Child() < 0 )
        {
            back->SetChild( int32_t( m_data.gpuChildren.size() ) );
            m_data.gpuChildren.push_back( Vector<short_ptr<GpuEvent>>() );
        }
        timeline = &m_data.gpuChildren[back->Child()];
    }

    timeline->push_back( zone );
    stack.push_back( zone );

    assert( !ctx->query[ev.queryId] );
    ctx->query[ev.queryId] = zone;
}

void Worker::ProcessGpuZoneBegin( const QueueGpuZoneBegin& ev, bool serial )
{
    auto zone = m_slab.Alloc<GpuEvent>();
    ProcessGpuZoneBeginImpl( zone, ev, serial );
}

void Worker::ProcessGpuZoneBeginCallstack( const QueueGpuZoneBegin& ev, bool serial )
{
    auto zone = m_slab.Alloc<GpuEvent>();
    ProcessGpuZoneBeginImpl( zone, ev, serial );
    if( serial )
    {
        assert( m_serialNextCallstack != 0 );
        zone->callstack.SetVal( m_serialNextCallstack );
        m_serialNextCallstack = 0;
    }
    else
    {
        auto td = GetCurrentThreadData();
        auto it = m_nextCallstack.find( td->id );
        assert( it != m_nextCallstack.end() );
        zone->callstack.SetVal( it->second );
        it->second = 0;
    }
}

void Worker::ProcessGpuZoneBeginAllocSrcLoc( const QueueGpuZoneBeginLean& ev, bool serial )
{
    auto zone = m_slab.Alloc<GpuEvent>();
    ProcessGpuZoneBeginAllocSrcLocImpl( zone, ev, serial );
}

void Worker::ProcessGpuZoneBeginAllocSrcLocCallstack( const QueueGpuZoneBeginLean& ev, bool serial )
{
    auto zone = m_slab.Alloc<GpuEvent>();
    ProcessGpuZoneBeginAllocSrcLocImpl( zone, ev, serial );
    if( serial )
    {
        assert( m_serialNextCallstack != 0 );
        zone->callstack.SetVal( m_serialNextCallstack );
        m_serialNextCallstack = 0;
    }
    else
    {
        auto td = GetCurrentThreadData();
        auto it = m_nextCallstack.find( td->id );
        assert( it != m_nextCallstack.end() );
        zone->callstack.SetVal( it->second );
        it->second = 0;
    }
}

void Worker::ProcessGpuZoneEnd( const QueueGpuZoneEnd& ev, bool serial )
{
    auto ctx = m_gpuCtxMap[ev.context];
    assert( ctx );

    auto td = ctx->threadData.find( ev.thread );
    assert( td != ctx->threadData.end() );

    assert( !td->second.stack.empty() );
    auto zone = td->second.stack.back_and_pop();

    assert( !ctx->query[ev.queryId] );
    ctx->query[ev.queryId] = zone;

    int64_t cpuTime;
    if( serial )
    {
        cpuTime = RefTime( m_refTimeSerial, ev.cpuTime );
    }
    else
    {
        cpuTime = RefTime( m_refTimeThread, ev.cpuTime );
    }
    const auto time = TscTime( cpuTime );
    zone->SetCpuEnd( time );
    if( m_data.lastTime < time ) m_data.lastTime = time;
}

void Worker::ProcessGpuTime( const QueueGpuTime& ev )
{
    auto ctx = m_gpuCtxMap[ev.context];
    assert( ctx );

    int64_t tgpu = RefTime( m_refTimeGpu, ev.gpuTime );
    if( tgpu < ctx->lastGpuTime - ( 1u << 31 ) )
    {
        if( ctx->overflow == 0 )
        {
            ctx->overflow = uint64_t( 1 ) << ( 64 - TracyLzcnt( ctx->lastGpuTime ) );
        }
        ctx->overflowMul++;
    }
    ctx->lastGpuTime = tgpu;
    if( ctx->overflow != 0 )
    {
        tgpu += ctx->overflow * ctx->overflowMul;
    }

    int64_t gpuTime;
    if( !ctx->hasPeriod )
    {
        if( !ctx->hasCalibration )
        {
            gpuTime = tgpu + ctx->timeDiff;
        }
        else
        {
            gpuTime = int64_t( ( tgpu - ctx->calibratedGpuTime ) * ctx->calibrationMod + ctx->calibratedCpuTime );
        }
    }
    else
    {
        if( !ctx->hasCalibration )
        {
            gpuTime = int64_t( double( ctx->period ) * tgpu ) + ctx->timeDiff;      // precision loss
        }
        else
        {
            gpuTime = int64_t( ( double( ctx->period ) * tgpu - ctx->calibratedGpuTime ) * ctx->calibrationMod + ctx->calibratedCpuTime );
        }
    }

    auto zone = ctx->query[ev.queryId];
    assert( zone );
    ctx->query[ev.queryId] = nullptr;

    if( zone->GpuStart() < 0 )
    {
        zone->SetGpuStart( gpuTime );
        ctx->count++;
    }
    else
    {
        zone->SetGpuEnd( gpuTime );
#ifndef TRACY_NO_STATISTICS
        const auto gpuStart = zone->GpuStart();
        const auto timeSpan = gpuTime - gpuStart;
        if( timeSpan > 0 )
        {
            GpuZoneThreadData ztd;
            ztd.SetZone( zone );
            ztd.SetThread( zone->Thread() );
            auto slz = GetGpuSourceLocationZones( zone->SrcLoc() );
            slz->zones.push_back( ztd );
            if( slz->min > timeSpan ) slz->min = timeSpan;
            if( slz->max < timeSpan ) slz->max = timeSpan;
            slz->total += timeSpan;
            slz->sumSq += double( timeSpan ) * timeSpan;
        }
#else
        CountZoneStatistics( zone );
#endif
    }
    if( m_data.lastTime < gpuTime ) m_data.lastTime = gpuTime;
}
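
// Illustrative sketch of the wrap-around handling at the top of ProcessGpuTime: a new
// timestamp that drops by more than 2^31 relative to the previous one is treated as a
// counter overflow, an overflow span derived from the previous value's bit width is
// remembered, and each wrap adds one more span. Names and the standalone form are
// hypothetical; the real state lives in the GPU context.
static int64_t ExampleUnwrapGpuTimestamp( int64_t t, int64_t& last, uint64_t& overflow, uint64_t& overflowMul )
{
    if( t < last - ( 1u << 31 ) )
    {
        if( overflow == 0 ) overflow = uint64_t( 1 ) << ( 64 - TracyLzcnt( last ) );
        overflowMul++;
    }
    last = t;
    return overflow != 0 ? t + int64_t( overflow * overflowMul ) : t;
}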

void Worker::ProcessGpuCalibration( const QueueGpuCalibration& ev )
{
    auto ctx = m_gpuCtxMap[ev.context];
    assert( ctx );
    assert( ctx->hasCalibration );

    int64_t gpuTime;
    if( !ctx->hasPeriod )
    {
        gpuTime = ev.gpuTime;
    }
    else
    {
        gpuTime = int64_t( double( ctx->period ) * ev.gpuTime );    // precision loss
    }

    const auto cpuDelta = ev.cpuDelta;
    const auto gpuDelta = gpuTime - ctx->calibratedGpuTime;
    ctx->calibrationMod = double( cpuDelta ) / gpuDelta;
    ctx->calibratedGpuTime = gpuTime;
    ctx->calibratedCpuTime = TscTime( ev.cpuTime );
}
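
// Illustrative only: each calibration event above refreshes a drift factor, the ratio of
// elapsed CPU time to elapsed GPU time since the previous calibration point, which
// ProcessGpuTime then applies when projecting GPU timestamps onto the CPU timeline.
// Standalone form of that update, with a hypothetical name:
static double ExampleCalibrationMod( int64_t cpuDelta, int64_t gpuNow, int64_t gpuPrev )
{
    return double( cpuDelta ) / double( gpuNow - gpuPrev );
}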

void Worker::ProcessGpuContextName( const QueueGpuContextName& ev )
{
    auto ctx = m_gpuCtxMap[ev.context];
    assert( ctx );
    const auto idx = GetSingleStringIdx();
    ctx->name = StringIdx( idx );
}

MemEvent* Worker::ProcessMemAllocImpl( uint64_t memname, MemData& memdata, const QueueMemAlloc& ev )
{
    if( memdata.active.find( ev.ptr ) != memdata.active.end() )
    {
        MemAllocTwiceFailure( ev.thread );
        return nullptr;
    }

    const auto time = TscTime( RefTime( m_refTimeSerial, ev.time ) );
    if( m_data.lastTime < time ) m_data.lastTime = time;
    NoticeThread( ev.thread );

    assert( memdata.data.empty() || memdata.data.back().TimeAlloc() <= time );

    memdata.active.emplace( ev.ptr, memdata.data.size() );

    const auto ptr = ev.ptr;
    uint32_t lo;
    uint16_t hi;
    memcpy( &lo, ev.size, 4 );
    memcpy( &hi, ev.size+4, 2 );
    const uint64_t size = lo | ( uint64_t( hi ) << 32 );

    auto& mem = memdata.data.push_next();
    mem.SetPtr( ptr );
    mem.SetSize( size );
    mem.SetTimeThreadAlloc( time, CompressThread( ev.thread ) );
    mem.SetTimeThreadFree( -1, 0 );
    mem.SetCsAlloc( 0 );
    mem.csFree.SetVal( 0 );

    const auto low = memdata.low;
    const auto high = memdata.high;
    const auto ptrend = ptr + size;

    memdata.low = std::min( low, ptr );
    memdata.high = std::max( high, ptrend );
    memdata.usage += size;

    MemAllocChanged( memname, memdata, time );
    return &mem;
}
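
// Illustrative only: the allocation size above arrives as a packed 48-bit value (six
// bytes in the network event), reassembled from a 32-bit low part and a 16-bit high
// part. A standalone equivalent of that decode, with a hypothetical name:
static uint64_t ExampleDecodeAllocSize( const char* packed6 )
{
    uint32_t lo;
    uint16_t hi;
    memcpy( &lo, packed6, 4 );
    memcpy( &hi, packed6 + 4, 2 );
    return lo | ( uint64_t( hi ) << 32 );
}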

MemEvent* Worker::ProcessMemFreeImpl( uint64_t memname, MemData& memdata, const QueueMemFree& ev )
{
    const auto refTime = RefTime( m_refTimeSerial, ev.time );

    auto it = memdata.active.find( ev.ptr );
    if( it == memdata.active.end() )
    {
        if( ev.ptr == 0 ) return nullptr;

        if( !m_ignoreMemFreeFaults )
        {
            CheckThreadString( ev.thread );
            MemFreeFailure( ev.thread );
        }
        return nullptr;
    }

    const auto time = TscTime( refTime );
    if( m_data.lastTime < time ) m_data.lastTime = time;
    NoticeThread( ev.thread );

    memdata.frees.push_back( it->second );
    auto& mem = memdata.data[it->second];
    mem.SetTimeThreadFree( time, CompressThread( ev.thread ) );
    memdata.usage -= mem.Size();
    memdata.active.erase( it );

    MemAllocChanged( memname, memdata, time );
    return &mem;
}

MemEvent* Worker::ProcessMemAlloc( const QueueMemAlloc& ev )
{
    assert( m_memNamePayload == 0 );
    return ProcessMemAllocImpl( 0, *m_data.memory, ev );
}

MemEvent* Worker::ProcessMemAllocNamed( const QueueMemAlloc& ev )
{
    assert( m_memNamePayload != 0 );
    auto memname = m_memNamePayload;
    m_memNamePayload = 0;
    auto it = m_data.memNameMap.find( memname );
    if( it == m_data.memNameMap.end() )
    {
        CheckString( memname );
        it = m_data.memNameMap.emplace( memname, m_slab.AllocInit<MemData>() ).first;
        it->second->name = memname;
    }
    return ProcessMemAllocImpl( memname, *it->second, ev );
}

MemEvent* Worker::ProcessMemFree( const QueueMemFree& ev )
{
    assert( m_memNamePayload == 0 );
    return ProcessMemFreeImpl( 0, *m_data.memory, ev );
}

MemEvent* Worker::ProcessMemFreeNamed( const QueueMemFree& ev )
{
    assert( m_memNamePayload != 0 );
    auto memname = m_memNamePayload;
    m_memNamePayload = 0;
    auto it = m_data.memNameMap.find( memname );
    if( it == m_data.memNameMap.end() )
    {
        CheckString( memname );
        it = m_data.memNameMap.emplace( memname, m_slab.AllocInit<MemData>() ).first;
        it->second->name = memname;
    }
    return ProcessMemFreeImpl( memname, *it->second, ev );
}

void Worker::ProcessMemAllocCallstack( const QueueMemAlloc& ev )
{
    auto mem = ProcessMemAlloc( ev );
    assert( m_serialNextCallstack != 0 );
    if( mem ) mem->SetCsAlloc( m_serialNextCallstack );
    m_serialNextCallstack = 0;
}

void Worker::ProcessMemAllocCallstackNamed( const QueueMemAlloc& ev )
{
    assert( m_memNamePayload != 0 );
    auto memname = m_memNamePayload;
    m_memNamePayload = 0;
    auto it = m_data.memNameMap.find( memname );
    if( it == m_data.memNameMap.end() )
    {
        CheckString( memname );
        it = m_data.memNameMap.emplace( memname, m_slab.AllocInit<MemData>() ).first;
        it->second->name = memname;
    }
    auto mem = ProcessMemAllocImpl( memname, *it->second, ev );
    assert( m_serialNextCallstack != 0 );
    if( mem ) mem->SetCsAlloc( m_serialNextCallstack );
    m_serialNextCallstack = 0;
}

void Worker::ProcessMemFreeCallstack( const QueueMemFree& ev )
{
    auto mem = ProcessMemFree( ev );
    assert( m_serialNextCallstack != 0 );
    if( mem ) mem->csFree.SetVal( m_serialNextCallstack );
    m_serialNextCallstack = 0;
}

void Worker::ProcessMemFreeCallstackNamed( const QueueMemFree& ev )
{
    assert( m_memNamePayload != 0 );
    auto memname = m_memNamePayload;
    m_memNamePayload = 0;
    auto it = m_data.memNameMap.find( memname );
    if( it == m_data.memNameMap.end() )
    {
        CheckString( memname );
        it = m_data.memNameMap.emplace( memname, m_slab.AllocInit<MemData>() ).first;
        it->second->name = memname;
    }
    auto mem = ProcessMemFreeImpl( memname, *it->second, ev );
    assert( m_serialNextCallstack != 0 );
    if( mem ) mem->csFree.SetVal( m_serialNextCallstack );
    m_serialNextCallstack = 0;
}

void Worker::ProcessCallstackSerial()
{
    assert( m_pendingCallstackId != 0 );
    assert( m_serialNextCallstack == 0 );
    m_serialNextCallstack = m_pendingCallstackId;
    m_pendingCallstackId = 0;
}

void Worker::ProcessCallstack()
{
    assert( m_pendingCallstackId != 0 );
    auto td = GetCurrentThreadData();
    auto it = m_nextCallstack.find( td->id );
    if( it == m_nextCallstack.end() ) it = m_nextCallstack.emplace( td->id, 0 ).first;
    assert( it->second == 0 );
    it->second = m_pendingCallstackId;
    m_pendingCallstackId = 0;
}

void Worker::ProcessCallstackSampleInsertSample( const SampleData& sd, ThreadData& td )
{
    const auto t = sd.time.Val();
    if( td.samples.empty() )
    {
        td.samples.push_back( sd );
    }
    else if( t != 0 && td.samples.back().time.Val() >= t )
    {
        m_inconsistentSamples = true;
        auto it = std::lower_bound( td.samples.begin(), td.samples.end(), t, []( const auto& lhs, const auto& rhs ) { return lhs.time.Val() < rhs; } );
        assert( it != td.samples.end() );
        if( it->time.Val() != t )
        {
            td.samples.push_back_non_empty( sd );
        }
        else
        {
            const auto mcs = MergeCallstacks( it->callstack.Val(), sd.callstack.Val() );
            it->callstack.SetVal( mcs );

            // This is a fixup of an already processed sample. Fixing stats is non-trivial, so just exit here.
            return;
        }
    }
    else
    {
        td.samples.push_back_non_empty( sd );
    }

    const auto callstack = sd.callstack.Val();
    const auto& cs = GetCallstack( callstack );
    const auto& ip = cs[0];
    if( GetCanonicalPointer( ip ) >> 63 != 0 ) td.kernelSampleCnt++;
    m_data.samplesCnt++;
}
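
// Illustrative only: the final bookkeeping above counts a sample as a kernel-mode sample
// when the canonical instruction pointer has its top bit set, which on the 64-bit
// targets sampled here places the address in the kernel half of the address space.
// Standalone predicate with a hypothetical name:
static bool ExampleIsKernelAddress( uint64_t canonicalPtr )
{
    return ( canonicalPtr >> 63 ) != 0;
}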

void Worker::ProcessCallstackSampleImpl( const SampleData& sd, ThreadData& td )
{
    ProcessCallstackSampleInsertSample( sd, td );

#ifndef TRACY_NO_STATISTICS
    const auto t = sd.time.Val();
    if( t == 0 || !m_identifySamples )
    {
        ProcessCallstackSampleImplStats( sd, td );
    }
    else
    {
        bool postpone = false;
        auto ctx = GetContextSwitchData( td.id );
        if( !ctx )
        {
            postpone = true;
        }
        else
        {
            auto it = std::lower_bound( ctx->v.begin(), ctx->v.end(), sd.time.Val(), [] ( const auto& l, const auto& r ) { return (uint64_t)l.End() < (uint64_t)r; } );
            if( it == ctx->v.end() )
            {
                postpone = true;
            }
            else if( sd.time.Val() == it->Start() )
            {
                td.ctxSwitchSamples.push_back( sd );
            }
            else
            {
                ProcessCallstackSampleImplStats( sd, td );
            }
        }
        if( postpone )
        {
            td.postponedSamples.push_back( sd );
        }
    }
#endif
}

#ifndef TRACY_NO_STATISTICS
void Worker::ProcessCallstackSampleImplStats( const SampleData& sd, ThreadData& td )
{
    const auto t = sd.time.Val();
    const auto callstack = sd.callstack.Val();
    const auto& cs = GetCallstack( callstack );
    const auto& ip = cs[0];

    uint16_t tid = CompressThread( td.id );

    auto frame = GetCallstackFrame( ip );
    if( frame )
    {
        const auto symAddr = frame->data[0].symAddr;
        auto it = m_data.instructionPointersMap.find( symAddr );
        if( it == m_data.instructionPointersMap.end() )
        {
            m_data.instructionPointersMap.emplace( symAddr, unordered_flat_map<CallstackFrameId, uint32_t, CallstackFrameIdHash, CallstackFrameIdCompare> { { ip, 1 } } );
        }
        else
        {
            auto fit = it->second.find( ip );
            if( fit == it->second.end() )
            {
                it->second.emplace( ip, 1 );
            }
            else
            {
                fit->second++;
            }
        }
        auto sit = m_data.symbolSamples.find( symAddr );
        if( sit == m_data.symbolSamples.end() )
        {
            m_data.symbolSamples.emplace( symAddr, Vector<SampleDataRange>( SampleDataRange { sd.time, tid, ip } ) );
        }
        else
        {
            if( sit->second.back().time.Val() <= sd.time.Val() )
            {
                sit->second.push_back_non_empty( SampleDataRange { sd.time, tid, ip } );
            }
            else
            {
                auto iit = std::upper_bound( sit->second.begin(), sit->second.end(), sd.time.Val(), [] ( const auto& lhs, const auto& rhs ) { return lhs < rhs.time.Val(); } );
                sit->second.insert( iit, SampleDataRange { sd.time, tid, ip } );
            }
        }
    }
    else
    {
        auto it = m_data.pendingInstructionPointers.find( ip );
        if( it == m_data.pendingInstructionPointers.end() )
        {
            m_data.pendingInstructionPointers.emplace( ip, 1 );
        }
        else
        {
            it->second++;
        }
        auto sit = m_data.pendingSymbolSamples.find( ip );
        if( sit == m_data.pendingSymbolSamples.end() )
        {
            m_data.pendingSymbolSamples.emplace( ip, Vector<SampleDataRange>( SampleDataRange { sd.time, tid, ip } ) );
        }
        else
        {
            sit->second.push_back_non_empty( SampleDataRange { sd.time, tid, ip } );
        }
    }

    auto childAddr = GetCanonicalPointer( cs[0] );
    for( uint16_t i=1; i<cs.size(); i++ )
    {
        auto addr = GetCanonicalPointer( cs[i] );
        auto it = m_data.childSamples.find( addr );
        if( it == m_data.childSamples.end() )
        {
            m_data.childSamples.emplace( addr, Vector<ChildSample>( ChildSample { sd.time, childAddr } ) );
        }
        else
        {
            it->second.push_back_non_empty( ChildSample { sd.time, childAddr } );
        }
        childAddr = addr;
    }

    const auto framesKnown = UpdateSampleStatistics( callstack, 1, true );

    if( t != 0 )
    {
        assert( td.samples.size() > td.ghostIdx );
        if( framesKnown && td.ghostIdx + 1 == td.samples.size() )
        {
            td.ghostIdx++;
            m_data.ghostCnt += AddGhostZone( cs, &td.ghostZones, t );
        }
        else
        {
            m_data.ghostZonesPostponed = true;
        }
    }
}
#endif

void Worker::ProcessCallstackSample( const QueueCallstackSample& ev )
{
    assert( m_pendingCallstackId != 0 );
    const auto callstack = m_pendingCallstackId;
    m_pendingCallstackId = 0;

    const auto refTime = RefTime( m_refTimeCtx, ev.time );
    const auto t = refTime == 0 ? 0 : TscTime( refTime );

    auto& td = *NoticeThread( ev.thread );

    SampleData sd;
    sd.time.SetVal( t );
    sd.callstack.SetVal( callstack );

    if( m_combineSamples && t != 0 )
    {
        const auto pendingTime = td.pendingSample.time.Val();
        if( pendingTime == 0 )
        {
            td.pendingSample = sd;
        }
        else
        {
            if( pendingTime == t )
            {
                const auto mcs = MergeCallstacks( td.pendingSample.callstack.Val(), callstack );
                sd.callstack.SetVal( mcs );
                ProcessCallstackSampleImpl( sd, td );
                td.pendingSample.time.Clear();
            }
            else
            {
                ProcessCallstackSampleImpl( td.pendingSample, td );
                td.pendingSample = sd;
            }
        }
    }
    else
    {
        ProcessCallstackSampleImpl( sd, td );
    }
}
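
// Illustrative sketch only: when sample combining is active, ProcessCallstackSample
// holds one sample back per thread; a second sample with the same timestamp gets its
// callstack merged into the held one, while a newer timestamp first flushes the held
// sample. A minimal decision helper mirroring that policy (enum and names hypothetical):
enum class ExampleSampleAction { Hold, MergeAndEmit, FlushThenHold };
static ExampleSampleAction ExampleCombinePolicy( int64_t pendingTime, int64_t newTime )
{
    if( pendingTime == 0 ) return ExampleSampleAction::Hold;
    if( pendingTime == newTime ) return ExampleSampleAction::MergeAndEmit;
    return ExampleSampleAction::FlushThenHold;
}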

void Worker::ProcessCallstackSampleContextSwitch( const QueueCallstackSample& ev )
{
    assert( m_pendingCallstackId != 0 );
    const auto callstack = m_pendingCallstackId;
    m_pendingCallstackId = 0;

    const auto refTime = RefTime( m_refTimeCtx, ev.time );
    const auto t = refTime == 0 ? 0 : TscTime( refTime );

    auto& td = *NoticeThread( ev.thread );

    SampleData sd;
    sd.time.SetVal( t );
    sd.callstack.SetVal( callstack );

    ProcessCallstackSampleInsertSample( sd, td );

    td.ctxSwitchSamples.push_back( sd );
}

void Worker::ProcessCallstackFrameSize( const QueueCallstackFrameSize& ev )
{
    assert( !m_callstackFrameStaging );
    assert( m_pendingCallstackSubframes == 0 );
    assert( m_pendingCallstackFrames > 0 );
    m_pendingCallstackFrames--;
    m_pendingCallstackSubframes = ev.size;
#ifndef TRACY_NO_STATISTICS
    m_data.newFramesWereReceived = true;
#endif

    const auto idx = GetSingleStringIdx();

    // Frames may be duplicated due to recursion
    auto fmit = m_data.callstackFrameMap.find( PackPointer( ev.ptr ) );
    if( fmit == m_data.callstackFrameMap.end() )
    {
        m_callstackFrameStaging = m_slab.Alloc<CallstackFrameData>();
        m_callstackFrameStaging->size = ev.size;
        m_callstackFrameStaging->data = m_slab.Alloc<CallstackFrame>( ev.size );
        m_callstackFrameStaging->imageName = StringIdx( idx );

        m_callstackFrameStagingPtr = ev.ptr;
    }
}

void Worker::ProcessCallstackFrame( const QueueCallstackFrame& ev, bool querySymbols )
{
    assert( m_pendingCallstackSubframes > 0 );

    const auto nitidx = GetSingleStringIdx();
    const auto fitidx = GetSecondStringIdx();

    if( m_callstackFrameStaging )
    {
        const auto idx = m_callstackFrameStaging->size - m_pendingCallstackSubframes;
        const auto file = StringIdx( fitidx );

        if( m_pendingCallstackSubframes > 1 && idx == 0 )
        {
            auto fstr = GetString( file );
            auto flen = strlen( fstr );
            if( flen >= s_tracySkipSubframesMinLen )
            {
                auto ptr = s_tracySkipSubframes;
                do
                {
                    if( flen >= ptr->len && memcmp( fstr + flen - ptr->len, ptr->str, ptr->len ) == 0 )
                    {
                        m_pendingCallstackSubframes--;
                        m_callstackFrameStaging->size--;
                        return;
                    }
                    ptr++;
                }
                while( ptr->str );
            }
        }

        const auto name = StringIdx( nitidx );
        m_callstackFrameStaging->data[idx].name = name;
        m_callstackFrameStaging->data[idx].file = file;
        m_callstackFrameStaging->data[idx].line = ev.line;
        m_callstackFrameStaging->data[idx].symAddr = ev.symAddr;

        if( querySymbols && ev.symAddr != 0 && m_data.symbolMap.find( ev.symAddr ) == m_data.symbolMap.end() && m_pendingSymbols.find( ev.symAddr ) == m_pendingSymbols.end() )
        {
            m_pendingSymbols.emplace( ev.symAddr, SymbolPending { name, m_callstackFrameStaging->imageName, file, ev.line, ev.symLen, idx < m_callstackFrameStaging->size - 1 } );
            Query( ServerQuerySymbol, ev.symAddr );
        }

        StringRef ref( StringRef::Idx, fitidx );
        auto cit = m_checkedFileStrings.find( ref );
        if( cit == m_checkedFileStrings.end() ) CacheSource( ref, m_callstackFrameStaging->imageName );

        const auto frameId = PackPointer( m_callstackFrameStagingPtr );
#ifndef TRACY_NO_STATISTICS
        auto it = m_data.pendingInstructionPointers.find( frameId );
        if( it != m_data.pendingInstructionPointers.end() )
        {
            if( ev.symAddr != 0 )
            {
                auto sit = m_data.instructionPointersMap.find( ev.symAddr );
                if( sit == m_data.instructionPointersMap.end() )
                {
                    m_data.instructionPointersMap.emplace( ev.symAddr, unordered_flat_map<CallstackFrameId, uint32_t, CallstackFrameIdHash, CallstackFrameIdCompare> { { it->first, it->second } } );
                }
                else
                {
                    assert( sit->second.find( it->first ) == sit->second.end() );
                    sit->second.emplace( it->first, it->second );
                }
            }
            m_data.pendingInstructionPointers.erase( it );
        }
        auto pit = m_data.pendingSymbolSamples.find( frameId );
        if( pit != m_data.pendingSymbolSamples.end() )
        {
            if( ev.symAddr != 0 )
            {
                auto sit = m_data.symbolSamples.find( ev.symAddr );
                if( sit == m_data.symbolSamples.end() )
                {
                    pdqsort_branchless( pit->second.begin(), pit->second.end(), [] ( const auto& lhs, const auto& rhs ) { return lhs.time.Val() < rhs.time.Val(); } );
                    m_data.symbolSamples.emplace( ev.symAddr, std::move( pit->second ) );
                }
                else
                {
                    for( auto& v : pit->second )
                    {
                        if( sit->second.back().time.Val() <= v.time.Val() )
                        {
                            sit->second.push_back_non_empty( v );
                        }
                        else
                        {
                            auto iit = std::upper_bound( sit->second.begin(), sit->second.end(), v.time.Val(), [] ( const auto& lhs, const auto& rhs ) { return lhs < rhs.time.Val(); } );
                            sit->second.insert( iit, v );
                        }
                    }
                }
            }
            m_data.pendingSymbolSamples.erase( pit );
        }
#endif

        if( --m_pendingCallstackSubframes == 0 )
        {
            assert( m_data.callstackFrameMap.find( frameId ) == m_data.callstackFrameMap.end() );
            m_data.callstackFrameMap.emplace( frameId, m_callstackFrameStaging );
            m_callstackFrameStaging = nullptr;
        }
    }
    else
    {
        m_pendingCallstackSubframes--;
    }
}
|
|
|
|
|
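// Resolves a symbol previously requested via ServerQuerySymbol: copies the pending data
// into a SymbolData record, optionally asks the client for the symbol's machine code, and
// registers the symbol location (inline or not) for later statistics and source lookup.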
void Worker::ProcessSymbolInformation( const QueueSymbolInformation& ev )
{
    auto it = m_pendingSymbols.find( ev.symAddr );
    assert( it != m_pendingSymbols.end() );

    const auto idx = GetSingleStringIdx();

    SymbolData sd;
    sd.name = it->second.name;
    sd.file = StringIdx( idx );
    sd.line = ev.line;
    sd.imageName = it->second.imageName;
    sd.callFile = it->second.file;
    sd.callLine = it->second.line;
    sd.isInline = it->second.isInline;
    sd.size.SetVal( it->second.size );
    m_data.symbolMap.emplace( ev.symAddr, std::move( sd ) );

    if( m_codeTransfer && it->second.size > 0 && it->second.size <= 128*1024 )
    {
        m_pendingSymbolCode++;
        Query( ServerQuerySymbolCode, ev.symAddr, it->second.size );
    }

    if( !it->second.isInline )
    {
        if( m_data.newSymbolsIndex < 0 ) m_data.newSymbolsIndex = int64_t( m_data.symbolLoc.size() );
        m_data.symbolLoc.push_back( SymbolLocation { ev.symAddr, it->second.size } );
    }
    else
    {
        if( m_data.newInlineSymbolsIndex < 0 ) m_data.newInlineSymbolsIndex = int64_t( m_data.symbolLocInline.size() );
        m_data.symbolLocInline.push_back( ev.symAddr );
    }

    StringRef ref( StringRef::Idx, idx );
    auto cit = m_checkedFileStrings.find( ref );
    if( cit == m_checkedFileStrings.end() ) CacheSource( ref, it->second.imageName );

    m_pendingSymbols.erase( it );
}

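// Associates a code address (symbol address + offset) with its source file/line and
// owning symbol. File index and line number are packed into a single key shared by the
// codeAddressToLocation and locationCodeAddressList maps.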
void Worker::ProcessCodeInformation( const QueueCodeInformation& ev )
{
    assert( m_pendingCodeInformation > 0 );
    m_pendingCodeInformation--;

    const auto idx = GetSingleStringIdx();
    const uint64_t ptr = ev.symAddr + ev.ptrOffset;

    if( ev.line != 0 )
    {
        assert( m_data.codeAddressToLocation.find( ptr ) == m_data.codeAddressToLocation.end() );
        const auto packed = PackFileLine( idx, ev.line );
        m_data.codeAddressToLocation.emplace( ptr, packed );

        auto lit = m_data.locationCodeAddressList.find( packed );
        if( lit == m_data.locationCodeAddressList.end() )
        {
            m_data.locationCodeAddressList.emplace( packed, Vector<uint64_t>( ptr ) );
        }
        else
        {
            const bool needSort = lit->second.back() > ptr;
            lit->second.push_back( ptr );
            if( needSort ) pdqsort_branchless( lit->second.begin(), lit->second.end() );
        }

        StringRef ref( StringRef::Idx, idx );
        auto cit = m_checkedFileStrings.find( ref );
        if( cit == m_checkedFileStrings.end() )
        {
            uint64_t baseAddr = 0;
            if( HasSymbolCode( ev.symAddr ) )
            {
                baseAddr = ev.symAddr;
            }
            else
            {
                const auto parentAddr = GetSymbolForAddress( ev.symAddr );
                if( parentAddr != 0 && HasSymbolCode( parentAddr ) )
                {
                    baseAddr = parentAddr;
                }
            }
            const SymbolData* sym = baseAddr == 0 ? nullptr : GetSymbolData( baseAddr );
            if( !sym )
            {
                CacheSource( ref );
            }
            else
            {
                CacheSource( ref, sym->imageName );
            }
        }
    }
    if( ev.symAddr != 0 )
    {
        assert( m_data.codeSymbolMap.find( ptr ) == m_data.codeSymbolMap.end() );
        m_data.codeSymbolMap.emplace( ptr, ev.symAddr );
    }
}

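// Records a client crash report: the reporting thread, the crash timestamp, the message
// string and, if one was queued for that thread, the accompanying callstack.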
void Worker::ProcessCrashReport( const QueueCrashReport& ev )
{
    CheckString( ev.text );

    auto td = GetCurrentThreadData();
    m_data.crashEvent.thread = td->id;
    m_data.crashEvent.time = TscTime( ev.time );
    m_data.crashEvent.message = ev.text;

    auto it = m_nextCallstack.find( td->id );
    if( it != m_nextCallstack.end() && it->second != 0 )
    {
        m_data.crashEvent.callstack = it->second;
        it->second = 0;
    }
    else
    {
        m_data.crashEvent.callstack = 0;
    }
}

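// Appends a system CPU usage sample to the implicit "system time" plot, creating the plot
// lazily on the first sample.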
void Worker::ProcessSysTime( const QueueSysTime& ev )
{
    const auto time = TscTime( ev.time );
    if( m_data.lastTime < time ) m_data.lastTime = time;
    const auto val = ev.sysTime;
    if( !m_sysTimePlot )
    {
        m_sysTimePlot = m_slab.AllocInit<PlotData>();
        m_sysTimePlot->name = 0;
        m_sysTimePlot->type = PlotType::SysTime;
        m_sysTimePlot->format = PlotValueFormatting::Percentage;
        m_sysTimePlot->min = val;
        m_sysTimePlot->max = val;
        m_sysTimePlot->sum = val;
        m_sysTimePlot->data.push_back( { time, val } );
        m_data.plots.Data().push_back( m_sysTimePlot );
    }
    else
    {
        assert( !m_sysTimePlot->data.empty() );
        assert( m_sysTimePlot->data.back().time.Val() <= time );
        if( m_sysTimePlot->min > val ) m_sysTimePlot->min = val;
        else if( m_sysTimePlot->max < val ) m_sysTimePlot->max = val;
        m_sysTimePlot->sum += val;
        m_sysTimePlot->data.push_back( { time, val } );
    }
}

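// Handles a kernel context switch: closes the running slice of the outgoing thread
// (accumulating its running time), opens a slice for the incoming thread, updates the
// per-CPU timeline, and counts cross-CPU migrations.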
void Worker::ProcessContextSwitch( const QueueContextSwitch& ev )
{
#ifndef TRACY_NO_STATISTICS
    m_data.newContextSwitchesReceived = true;
#endif

    const auto time = TscTime( RefTime( m_refTimeCtx, ev.time ) );
    if( m_data.lastTime < time ) m_data.lastTime = time;

    if( ev.cpu >= m_data.cpuDataCount ) m_data.cpuDataCount = ev.cpu + 1;
    auto& cs = m_data.cpuData[ev.cpu].cs;
    if( ev.oldThread != 0 )
    {
        auto it = m_data.ctxSwitch.find( ev.oldThread );
        if( it != m_data.ctxSwitch.end() )
        {
            auto& data = it->second->v;
            assert( !data.empty() );
            auto& item = data.back();
            assert( item.Start() <= time );
            assert( item.End() == -1 );
            item.SetEnd( time );
            item.SetReason( ev.reason );
            item.SetState( ev.state );

            const auto dt = time - item.Start();
            it->second->runningTime += dt;

            auto tdit = m_data.cpuThreadData.find( ev.oldThread );
            if( tdit == m_data.cpuThreadData.end() )
            {
                tdit = m_data.cpuThreadData.emplace( ev.oldThread, CpuThreadData {} ).first;
            }
            tdit->second.runningRegions++;
            tdit->second.runningTime += dt;
        }
        if( !cs.empty() )
        {
            auto& cx = cs.back();
            assert( m_data.externalThreadCompress.DecompressThread( cx.Thread() ) == ev.oldThread );
            cx.SetEnd( time );
        }
    }
    if( ev.newThread != 0 )
    {
        auto it = m_data.ctxSwitch.find( ev.newThread );
        if( it == m_data.ctxSwitch.end() )
        {
            auto ctx = m_slab.AllocInit<ContextSwitch>();
            it = m_data.ctxSwitch.emplace( ev.newThread, ctx ).first;
        }
        auto& data = it->second->v;
        ContextSwitchData* item = nullptr;
        bool migration = false;
        if( !data.empty() && data.back().Reason() == ContextSwitchData::Wakeup )
        {
            item = &data.back();
            if( data.size() > 1 )
            {
                migration = data[data.size()-2].Cpu() != ev.cpu;
            }
        }
        else
        {
            assert( data.empty() || (uint64_t)data.back().End() <= (uint64_t)time );
            if( !data.empty() )
            {
                migration = data.back().Cpu() != ev.cpu;
            }
            item = &data.push_next();
            item->SetWakeup( time );
        }
        item->SetStart( time );
        item->SetEnd( -1 );
        item->SetCpu( ev.cpu );
        item->SetReason( -1 );
        item->SetState( -1 );
        item->SetThread( 0 );

        auto& cx = cs.push_next();
        cx.SetStart( time );
        cx.SetEnd( -1 );
        cx.SetThread( m_data.externalThreadCompress.CompressThread( ev.newThread ) );

        CheckExternalName( ev.newThread );

        if( migration )
        {
            auto tdit = m_data.cpuThreadData.find( ev.newThread );
            if( tdit == m_data.cpuThreadData.end() )
            {
                tdit = m_data.cpuThreadData.emplace( ev.newThread, CpuThreadData {} ).first;
            }
            tdit->second.migrations++;
        }
    }
}

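// Records a thread wakeup as the start of a pending context-switch slice; the slice is
// presumably completed by a later ProcessContextSwitch event for the same thread, which
// checks for the Wakeup reason above.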
void Worker::ProcessThreadWakeup( const QueueThreadWakeup& ev )
{
    const auto time = TscTime( RefTime( m_refTimeCtx, ev.time ) );
    if( m_data.lastTime < time ) m_data.lastTime = time;

    auto it = m_data.ctxSwitch.find( ev.thread );
    if( it == m_data.ctxSwitch.end() )
    {
        auto ctx = m_slab.AllocInit<ContextSwitch>();
        it = m_data.ctxSwitch.emplace( ev.thread, ctx ).first;
    }
    auto& data = it->second->v;
    if( !data.empty() && !data.back().IsEndValid() ) return; // wakeup of a running thread
    auto& item = data.push_next();
    item.SetWakeup( time );
    item.SetStart( time );
    item.SetEnd( -1 );
    item.SetCpu( 0 );
    item.SetReason( ContextSwitchData::Wakeup );
    item.SetState( -1 );
    item.SetThread( 0 );
}

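// Remembers the thread-id to process-id mapping, keeping only the first association seen.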
void Worker::ProcessTidToPid( const QueueTidToPid& ev )
{
    if( m_data.tidToPid.find( ev.tid ) == m_data.tidToPid.end() ) m_data.tidToPid.emplace( ev.tid, ev.pid );
}

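// Hardware performance-counter samples. Each handler appends the sample timestamp to the
// per-instruction-pointer HwSampleData vector for its counter type (cycles, retired
// instructions, cache references/misses, branch retirements/misses).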
void Worker::ProcessHwSampleCpuCycle( const QueueHwSample& ev )
{
    const auto time = ev.time == 0 ? 0 : TscTime( ev.time );
    auto it = m_data.hwSamples.find( ev.ip );
    if( it == m_data.hwSamples.end() ) it = m_data.hwSamples.emplace( ev.ip, HwSampleData {} ).first;
    it->second.cycles.push_back( time );
}

void Worker::ProcessHwSampleInstructionRetired( const QueueHwSample& ev )
{
    const auto time = ev.time == 0 ? 0 : TscTime( ev.time );
    auto it = m_data.hwSamples.find( ev.ip );
    if( it == m_data.hwSamples.end() ) it = m_data.hwSamples.emplace( ev.ip, HwSampleData {} ).first;
    it->second.retired.push_back( time );
}

void Worker::ProcessHwSampleCacheReference( const QueueHwSample& ev )
{
    const auto time = ev.time == 0 ? 0 : TscTime( ev.time );
    auto it = m_data.hwSamples.find( ev.ip );
    if( it == m_data.hwSamples.end() ) it = m_data.hwSamples.emplace( ev.ip, HwSampleData {} ).first;
    it->second.cacheRef.push_back( time );
}

void Worker::ProcessHwSampleCacheMiss( const QueueHwSample& ev )
{
    const auto time = ev.time == 0 ? 0 : TscTime( ev.time );
    auto it = m_data.hwSamples.find( ev.ip );
    if( it == m_data.hwSamples.end() ) it = m_data.hwSamples.emplace( ev.ip, HwSampleData {} ).first;
    it->second.cacheMiss.push_back( time );
}

void Worker::ProcessHwSampleBranchRetired( const QueueHwSample& ev )
{
    const auto time = ev.time == 0 ? 0 : TscTime( ev.time );
    auto it = m_data.hwSamples.find( ev.ip );
    if( it == m_data.hwSamples.end() ) it = m_data.hwSamples.emplace( ev.ip, HwSampleData {} ).first;
    it->second.branchRetired.push_back( time );
    m_data.hasBranchRetirement = true;
}

void Worker::ProcessHwSampleBranchMiss( const QueueHwSample& ev )
{
    const auto time = ev.time == 0 ? 0 : TscTime( ev.time );
    auto it = m_data.hwSamples.find( ev.ip );
    if( it == m_data.hwSamples.end() ) it = m_data.hwSamples.emplace( ev.ip, HwSampleData {} ).first;
    it->second.branchMiss.push_back( time );
}

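// Registers a user-adjustable parameter announced by the client.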
void Worker::ProcessParamSetup( const QueueParamSetup& ev )
{
    CheckString( ev.name );
    m_params.push_back( Parameter { ev.idx, StringRef( StringRef::Ptr, ev.name ), bool( ev.isBool ), ev.val } );
}

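// Builds the package -> core -> thread CPU topology tree and the reverse per-thread
// lookup map.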
void Worker::ProcessCpuTopology( const QueueCpuTopology& ev )
{
    auto package = m_data.cpuTopology.find( ev.package );
    if( package == m_data.cpuTopology.end() ) package = m_data.cpuTopology.emplace( ev.package, unordered_flat_map<uint32_t, std::vector<uint32_t>> {} ).first;
    auto core = package->second.find( ev.core );
    if( core == package->second.end() ) core = package->second.emplace( ev.core, std::vector<uint32_t> {} ).first;
    core->second.emplace_back( ev.thread );

    assert( m_data.cpuTopologyMap.find( ev.thread ) == m_data.cpuTopologyMap.end() );
    m_data.cpuTopologyMap.emplace( ev.thread, CpuThreadTopology { ev.package, ev.core } );
}

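// Stashes the name for the next memory event; the assert presumably guards against two
// name payloads arriving without a consuming memory event in between.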
void Worker::ProcessMemNamePayload( const QueueMemNamePayload& ev )
{
    assert( m_memNamePayload == 0 );
    m_memNamePayload = ev.name;
}
|
|
|
|
|
2021-11-02 00:53:10 +00:00
|
|
|
void Worker::ProcessFiberEnter( const QueueFiberEnter& ev )
|
|
|
|
{
|
2022-06-21 23:55:10 +00:00
|
|
|
const auto t = TscTime( RefTime( m_refTimeThread, ev.time ) );
|
2021-11-02 00:53:10 +00:00
|
|
|
if( m_data.lastTime < t ) m_data.lastTime = t;
|
|
|
|
|
|
|
|
uint64_t tid;
|
|
|
|
auto it = m_data.fiberToThreadMap.find( ev.fiber );
|
|
|
|
if( it == m_data.fiberToThreadMap.end() )
|
|
|
|
{
|
|
|
|
tid = ( uint64_t(1) << 32 ) | m_data.fiberToThreadMap.size();
|
|
|
|
m_data.fiberToThreadMap.emplace( ev.fiber, tid );
|
|
|
|
NewThread( tid, true );
|
2021-11-03 17:57:30 +00:00
|
|
|
CheckFiberName( ev.fiber, tid );
|
2021-11-02 00:53:10 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
tid = it->second;
|
|
|
|
}
|
|
|
|
|
2021-11-06 17:55:29 +00:00
|
|
|
auto td = NoticeThread( ev.thread );
|
2021-11-06 20:14:37 +00:00
|
|
|
if( td->fiber )
|
|
|
|
{
|
|
|
|
auto cit = m_data.ctxSwitch.find( td->fiber->id );
|
|
|
|
assert( cit != m_data.ctxSwitch.end() );
|
|
|
|
auto& data = cit->second->v;
|
|
|
|
assert( !data.empty() );
|
|
|
|
auto& item = data.back();
|
|
|
|
item.SetEnd( t );
|
|
|
|
}
|
2021-11-06 17:55:29 +00:00
|
|
|
td->fiber = RetrieveThread( tid );
|
|
|
|
assert( td->fiber );
|
2021-11-06 19:22:38 +00:00
|
|
|
|
|
|
|
auto cit = m_data.ctxSwitch.find( tid );
|
|
|
|
if( cit == m_data.ctxSwitch.end() )
|
|
|
|
{
|
|
|
|
auto ctx = m_slab.AllocInit<ContextSwitch>();
|
|
|
|
cit = m_data.ctxSwitch.emplace( tid, ctx ).first;
|
|
|
|
}
|
|
|
|
auto& data = cit->second->v;
|
|
|
|
auto& item = data.push_next();
|
|
|
|
item.SetStartCpu( t, 0 );
|
|
|
|
item.SetWakeup( t );
|
|
|
|
item.SetEndReasonState( -1, ContextSwitchData::Fiber, -1 );
|
|
|
|
item.SetThread( CompressThread( ev.thread ) );
|
2021-11-02 00:53:10 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void Worker::ProcessFiberLeave( const QueueFiberLeave& ev )
|
|
|
|
{
|
2022-06-21 23:55:10 +00:00
|
|
|
const auto t = TscTime( RefTime( m_refTimeThread, ev.time ) );
|
2021-11-02 00:53:10 +00:00
|
|
|
if( m_data.lastTime < t ) m_data.lastTime = t;
|
|
|
|
|
2021-11-06 17:55:29 +00:00
|
|
|
auto td = RetrieveThread( ev.thread );
|
|
|
|
if( !td->fiber )
|
2021-11-02 00:53:10 +00:00
|
|
|
{
|
|
|
|
FiberLeaveFailure();
|
2021-11-06 19:22:38 +00:00
|
|
|
return;
|
2021-11-02 00:53:10 +00:00
|
|
|
}
|
2021-11-06 19:22:38 +00:00
|
|
|
|
|
|
|
auto cit = m_data.ctxSwitch.find( td->fiber->id );
|
|
|
|
assert( cit != m_data.ctxSwitch.end() );
|
|
|
|
auto& data = cit->second->v;
|
|
|
|
assert( !data.empty() );
|
|
|
|
auto& item = data.back();
|
|
|
|
item.SetEnd( t );
|
|
|
|
|
|
|
|
const auto dt = t - item.Start();
|
|
|
|
cit->second->runningTime += dt;
|
|
|
|
|
|
|
|
td->fiber = nullptr;
|
2021-11-02 00:53:10 +00:00
|
|
|
}
|
|
|
|
|
2020-09-23 13:53:17 +00:00
|
|
|
void Worker::MemAllocChanged( uint64_t memname, MemData& memdata, int64_t time )
|
2018-04-28 13:49:12 +00:00
|
|
|
{
|
2020-09-23 13:53:17 +00:00
|
|
|
const auto val = (double)memdata.usage;
|
|
|
|
if( !memdata.plot )
|
2018-04-28 13:49:12 +00:00
|
|
|
{
|
2020-09-25 14:39:00 +00:00
|
|
|
CreateMemAllocPlot( memdata );
|
2020-09-23 13:53:17 +00:00
|
|
|
memdata.plot->min = val;
|
|
|
|
memdata.plot->max = val;
|
2021-10-17 11:04:56 +00:00
|
|
|
memdata.plot->sum = val;
|
2020-09-23 13:53:17 +00:00
|
|
|
memdata.plot->data.push_back( { time, val } );
|
2018-04-28 13:49:12 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2020-09-23 13:53:17 +00:00
|
|
|
assert( !memdata.plot->data.empty() );
|
|
|
|
assert( memdata.plot->data.back().time.Val() <= time );
|
|
|
|
if( memdata.plot->min > val ) memdata.plot->min = val;
|
|
|
|
else if( memdata.plot->max < val ) memdata.plot->max = val;
|
2021-10-17 11:04:56 +00:00
|
|
|
memdata.plot->sum += val;
|
2021-02-07 14:52:08 +00:00
|
|
|
memdata.plot->data.push_back( { time, val } );
|
2018-04-28 13:49:12 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-09-25 14:39:00 +00:00
|
|
|
void Worker::CreateMemAllocPlot( MemData& memdata )
|
2018-04-28 13:49:12 +00:00
|
|
|
{
|
2020-09-23 13:53:17 +00:00
|
|
|
assert( !memdata.plot );
|
|
|
|
memdata.plot = m_slab.AllocInit<PlotData>();
|
2020-09-25 14:39:00 +00:00
|
|
|
memdata.plot->name = memdata.name;
|
2020-09-23 13:53:17 +00:00
|
|
|
memdata.plot->type = PlotType::Memory;
|
|
|
|
memdata.plot->format = PlotValueFormatting::Memory;
|
|
|
|
memdata.plot->data.push_back( { GetFrameBegin( *m_data.framesBase, 0 ), 0. } );
|
|
|
|
m_data.plots.Data().push_back( memdata.plot );
|
2018-04-01 00:03:34 +00:00
|
|
|
}
|
|
|
|
|
2020-09-25 14:39:00 +00:00
|
|
|
void Worker::ReconstructMemAllocPlot( MemData& mem )
|
2018-04-29 11:40:04 +00:00
|
|
|
{
|
2020-01-20 22:16:33 +00:00
|
|
|
#ifdef NO_PARALLEL_SORT
|
2019-08-15 20:56:55 +00:00
|
|
|
pdqsort_branchless( mem.frees.begin(), mem.frees.end(), [&mem] ( const auto& lhs, const auto& rhs ) { return mem.data[lhs].TimeFree() < mem.data[rhs].TimeFree(); } );
|
2018-05-12 20:41:18 +00:00
|
|
|
#else
|
2019-08-15 20:56:55 +00:00
|
|
|
std::sort( std::execution::par_unseq, mem.frees.begin(), mem.frees.end(), [&mem] ( const auto& lhs, const auto& rhs ) { return mem.data[lhs].TimeFree() < mem.data[rhs].TimeFree(); } );
|
2018-05-12 20:41:18 +00:00
|
|
|
#endif
|
2018-04-29 11:40:04 +00:00
|
|
|
|
2018-05-02 15:59:50 +00:00
|
|
|
const auto psz = mem.data.size() + mem.frees.size() + 1;
|
2018-04-29 11:40:04 +00:00
|
|
|
|
|
|
|
PlotData* plot;
|
|
|
|
{
|
2021-02-07 17:11:24 +00:00
|
|
|
std::lock_guard<std::mutex> lock( m_data.lock );
|
2018-04-29 11:40:04 +00:00
|
|
|
plot = m_slab.AllocInit<PlotData>();
|
|
|
|
}
|
|
|
|
|
2020-09-25 14:39:00 +00:00
|
|
|
plot->name = mem.name;
|
2018-04-29 11:40:04 +00:00
|
|
|
plot->type = PlotType::Memory;
|
2019-11-05 17:02:08 +00:00
|
|
|
plot->format = PlotValueFormatting::Memory;
|
2019-02-15 00:58:23 +00:00
|
|
|
plot->data.reserve_exact( psz, m_slab );
|
2018-04-29 11:40:04 +00:00
|
|
|
|
2018-05-02 15:59:50 +00:00
|
|
|
auto aptr = mem.data.begin();
|
|
|
|
auto aend = mem.data.end();
|
|
|
|
auto fptr = mem.frees.begin();
|
|
|
|
auto fend = mem.frees.end();
|
2018-04-29 11:40:04 +00:00
|
|
|
|
2021-10-17 11:14:23 +00:00
|
|
|
double sum = 0;
|
2018-04-29 11:40:04 +00:00
|
|
|
double max = 0;
|
|
|
|
double usage = 0;
|
|
|
|
|
|
|
|
auto ptr = plot->data.data();
|
2018-08-04 17:47:09 +00:00
|
|
|
ptr->time = GetFrameBegin( *m_data.framesBase, 0 );
|
2018-04-29 11:40:04 +00:00
|
|
|
ptr->val = 0;
|
|
|
|
ptr++;
|
|
|
|
|
2018-04-30 11:44:44 +00:00
|
|
|
if( aptr != aend && fptr != fend )
|
2018-04-29 11:40:04 +00:00
|
|
|
{
|
2019-08-15 20:56:55 +00:00
|
|
|
auto atime = aptr->TimeAlloc();
|
|
|
|
auto ftime = mem.data[*fptr].TimeFree();
|
2018-04-30 11:44:44 +00:00
|
|
|
|
|
|
|
for(;;)
|
2018-04-29 11:40:04 +00:00
|
|
|
{
|
2018-04-30 11:44:44 +00:00
|
|
|
if( atime < ftime )
|
|
|
|
{
|
2019-10-30 21:01:13 +00:00
|
|
|
usage += int64_t( aptr->Size() );
|
2018-04-30 11:44:44 +00:00
|
|
|
assert( usage >= 0 );
|
|
|
|
if( max < usage ) max = usage;
|
2021-10-17 11:14:23 +00:00
|
|
|
sum += usage;
|
2018-04-30 11:44:44 +00:00
|
|
|
ptr->time = atime;
|
|
|
|
ptr->val = usage;
|
|
|
|
ptr++;
|
|
|
|
aptr++;
|
|
|
|
if( aptr == aend ) break;
|
2019-08-15 20:56:55 +00:00
|
|
|
atime = aptr->TimeAlloc();
|
2018-04-30 11:44:44 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2019-10-30 21:01:13 +00:00
|
|
|
usage -= int64_t( mem.data[*fptr].Size() );
|
2018-04-30 11:44:44 +00:00
|
|
|
assert( usage >= 0 );
|
|
|
|
if( max < usage ) max = usage;
|
2021-10-17 11:14:23 +00:00
|
|
|
sum += usage;
|
2018-04-30 11:44:44 +00:00
|
|
|
ptr->time = ftime;
|
|
|
|
ptr->val = usage;
|
|
|
|
ptr++;
|
|
|
|
fptr++;
|
|
|
|
if( fptr == fend ) break;
|
2019-08-15 20:56:55 +00:00
|
|
|
ftime = mem.data[*fptr].TimeFree();
|
2018-04-30 11:44:44 +00:00
|
|
|
}
|
2018-04-29 11:40:04 +00:00
|
|
|
}
|
|
|
|
}
|
2018-04-30 11:44:44 +00:00
|
|
|
|
2018-04-29 11:40:04 +00:00
|
|
|
while( aptr != aend )
|
|
|
|
{
|
2019-08-15 20:56:55 +00:00
|
|
|
assert( aptr->TimeFree() < 0 );
|
|
|
|
int64_t time = aptr->TimeAlloc();
|
2019-10-30 21:01:13 +00:00
|
|
|
usage += int64_t( aptr->Size() );
|
2018-04-29 11:40:04 +00:00
|
|
|
assert( usage >= 0 );
|
|
|
|
if( max < usage ) max = usage;
|
2021-10-17 11:14:23 +00:00
|
|
|
sum += usage;
|
2018-04-29 11:40:04 +00:00
|
|
|
ptr->time = time;
|
|
|
|
ptr->val = usage;
|
|
|
|
ptr++;
|
|
|
|
aptr++;
|
|
|
|
}
|
|
|
|
while( fptr != fend )
|
|
|
|
{
|
2019-01-29 21:10:14 +00:00
|
|
|
const auto& memData = mem.data[*fptr];
|
2019-08-15 20:56:55 +00:00
|
|
|
int64_t time = memData.TimeFree();
|
2019-10-30 21:01:13 +00:00
|
|
|
usage -= int64_t( memData.Size() );
|
2018-04-29 11:40:04 +00:00
|
|
|
assert( usage >= 0 );
|
|
|
|
assert( max >= usage );
|
2021-10-17 11:14:23 +00:00
|
|
|
sum += usage;
|
2018-04-29 11:40:04 +00:00
|
|
|
ptr->time = time;
|
|
|
|
ptr->val = usage;
|
|
|
|
ptr++;
|
|
|
|
fptr++;
|
|
|
|
}
|
|
|
|
|
|
|
|
plot->min = 0;
|
|
|
|
plot->max = max;
|
2021-10-17 11:14:23 +00:00
|
|
|
plot->sum = sum;
|
2018-04-29 11:40:04 +00:00
|
|
|
|
2021-02-07 17:11:24 +00:00
|
|
|
std::lock_guard<std::mutex> lock( m_data.lock );
|
2018-08-04 14:33:03 +00:00
|
|
|
m_data.plots.Data().insert( m_data.plots.Data().begin(), plot );
|
2020-09-23 13:53:17 +00:00
|
|
|
mem.plot = plot;
|
2018-04-29 11:40:04 +00:00
|
|
|
}
|
|
|
|
|
2019-11-04 23:40:41 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
void Worker::ReconstructContextSwitchUsage()
|
|
|
|
{
|
2019-11-05 00:28:02 +00:00
|
|
|
assert( m_data.cpuDataCount != 0 );
|
|
|
|
const auto cpucnt = m_data.cpuDataCount;
|
|
|
|
|
|
|
|
auto& vec = m_data.ctxUsage;
|
|
|
|
vec.push_back( ContextSwitchUsage( 0, 0, 0 ) );
|
|
|
|
|
|
|
|
struct Cpu
|
|
|
|
{
|
|
|
|
bool startDone;
|
|
|
|
Vector<ContextSwitchCpu>::iterator it;
|
|
|
|
Vector<ContextSwitchCpu>::iterator end;
|
|
|
|
};
|
|
|
|
std::vector<Cpu> cpus;
|
|
|
|
cpus.reserve( cpucnt );
|
|
|
|
for( int i=0; i<cpucnt; i++ )
|
|
|
|
{
|
|
|
|
cpus.emplace_back( Cpu { false, m_data.cpuData[i].cs.begin(), m_data.cpuData[i].cs.end() } );
|
|
|
|
}
|
|
|
|
|
|
|
|
uint8_t other = 0;
|
|
|
|
uint8_t own = 0;
|
|
|
|
for(;;)
|
|
|
|
{
|
|
|
|
int64_t nextTime = std::numeric_limits<int64_t>::max();
|
|
|
|
bool atEnd = true;
|
|
|
|
for( int i=0; i<cpucnt; i++ )
|
|
|
|
{
|
|
|
|
if( cpus[i].it != cpus[i].end )
|
|
|
|
{
|
|
|
|
atEnd = false;
|
|
|
|
const auto ct = !cpus[i].startDone ? cpus[i].it->Start() : cpus[i].it->End();
|
|
|
|
if( ct < nextTime ) nextTime = ct;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if( atEnd ) break;
|
|
|
|
for( int i=0; i<cpucnt; i++ )
|
|
|
|
{
|
|
|
|
while( cpus[i].it != cpus[i].end )
|
|
|
|
{
|
|
|
|
const auto ct = !cpus[i].startDone ? cpus[i].it->Start() : cpus[i].it->End();
|
|
|
|
if( nextTime != ct ) break;
|
|
|
|
const auto isOwn = GetPidFromTid( DecompressThreadExternal( cpus[i].it->Thread() ) ) == m_pid;
|
|
|
|
if( !cpus[i].startDone )
|
|
|
|
{
|
|
|
|
if( isOwn )
|
|
|
|
{
|
|
|
|
own++;
|
|
|
|
assert( own <= cpucnt );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
other++;
|
|
|
|
assert( other <= cpucnt );
|
|
|
|
}
|
2020-02-10 00:26:31 +00:00
|
|
|
if( !cpus[i].it->IsEndValid() )
|
2019-11-05 00:28:02 +00:00
|
|
|
{
|
|
|
|
cpus[i].it++;
|
|
|
|
assert( cpus[i].it == cpus[i].end );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
cpus[i].startDone = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
if( isOwn )
|
|
|
|
{
|
|
|
|
assert( own > 0 );
|
|
|
|
own--;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
assert( other > 0 );
|
|
|
|
other--;
|
|
|
|
}
|
|
|
|
cpus[i].startDone = false;
|
|
|
|
cpus[i].it++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
const auto& back = vec.back();
|
|
|
|
if( back.Other() != other || back.Own() != own )
|
|
|
|
{
|
|
|
|
vec.push_back( ContextSwitchUsage( nextTime, other, own ) );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-02-07 17:11:24 +00:00
|
|
|
std::lock_guard<std::mutex> lock( m_data.lock );
|
2019-11-04 23:40:41 +00:00
|
|
|
m_data.ctxUsageReady = true;
|
|
|
|
}
|
2020-02-27 15:48:50 +00:00
|
|
|
|
2020-05-31 12:05:16 +00:00
|
|
|
bool Worker::UpdateSampleStatistics( uint32_t callstack, uint32_t count, bool canPostpone )
|
2020-02-27 15:48:50 +00:00
|
|
|
{
|
|
|
|
const auto& cs = GetCallstack( callstack );
|
|
|
|
const auto cssz = cs.size();
|
|
|
|
|
2020-02-29 12:45:50 +00:00
|
|
|
auto frames = (const CallstackFrameData**)alloca( cssz * sizeof( CallstackFrameData* ) );
|
2020-03-28 17:04:33 +00:00
|
|
|
for( uint16_t i=0; i<cssz; i++ )
|
2020-02-27 15:48:50 +00:00
|
|
|
{
|
2020-02-29 12:45:50 +00:00
|
|
|
auto frame = GetCallstackFrame( cs[i] );
|
|
|
|
if( !frame )
|
|
|
|
{
|
|
|
|
if( canPostpone )
|
|
|
|
{
|
|
|
|
auto it = m_data.postponedSamples.find( callstack );
|
|
|
|
if( it == m_data.postponedSamples.end() )
|
|
|
|
{
|
|
|
|
m_data.postponedSamples.emplace( callstack, count );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
it->second += count;
|
|
|
|
}
|
|
|
|
}
|
2020-05-31 12:05:16 +00:00
|
|
|
return false;
|
2020-02-29 12:45:50 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
frames[i] = frame;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if( canPostpone )
|
|
|
|
{
|
|
|
|
auto it = m_data.postponedSamples.find( callstack );
|
|
|
|
if( it != m_data.postponedSamples.end() )
|
|
|
|
{
|
|
|
|
count += it->second;
|
|
|
|
m_data.postponedSamples.erase( it );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-02-29 15:24:15 +00:00
|
|
|
UpdateSampleStatisticsImpl( frames, cssz, count, cs );
|
2020-05-31 12:05:16 +00:00
|
|
|
return true;
|
2020-02-29 12:45:50 +00:00
|
|
|
}
|
|
|
|
|
2020-02-29 13:12:04 +00:00
|
|
|
void Worker::UpdateSampleStatisticsPostponed( decltype(Worker::DataBlock::postponedSamples.begin())& it )
|
|
|
|
{
|
|
|
|
const auto& cs = GetCallstack( it->first );
|
|
|
|
const auto cssz = cs.size();
|
|
|
|
|
|
|
|
auto frames = (const CallstackFrameData**)alloca( cssz * sizeof( CallstackFrameData* ) );
|
2020-03-28 17:04:33 +00:00
|
|
|
for( uint16_t i=0; i<cssz; i++ )
|
2020-02-29 13:12:04 +00:00
|
|
|
{
|
|
|
|
auto frame = GetCallstackFrame( cs[i] );
|
|
|
|
if( !frame )
|
|
|
|
{
|
|
|
|
++it;
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
frames[i] = frame;
|
|
|
|
}
|
|
|
|
|
2020-02-29 15:24:15 +00:00
|
|
|
UpdateSampleStatisticsImpl( frames, cssz, it->second, cs );
|
2020-02-29 13:12:04 +00:00
|
|
|
it = m_data.postponedSamples.erase( it );
|
|
|
|
}
|
|
|
|
|
2020-03-28 17:04:33 +00:00
|
|
|
void Worker::UpdateSampleStatisticsImpl( const CallstackFrameData** frames, uint16_t framesCount, uint32_t count, const VarArray<CallstackFrameId>& cs )
|
2020-02-29 12:45:50 +00:00
|
|
|
{
|
2020-02-29 15:24:15 +00:00
|
|
|
const auto fexcl = frames[0];
|
|
|
|
const auto fxsz = fexcl->size;
|
2020-02-29 15:41:22 +00:00
|
|
|
const auto& frame0 = fexcl->data[0];
|
|
|
|
auto sym0 = m_data.symbolStats.find( frame0.symAddr );
|
2021-11-27 16:46:43 +00:00
|
|
|
if( sym0 == m_data.symbolStats.end() ) sym0 = m_data.symbolStats.emplace( frame0.symAddr, SymbolStats { 0, 0 } ).first;
|
2020-02-29 15:41:22 +00:00
|
|
|
sym0->second.excl += count;
|
|
|
|
for( uint8_t f=1; f<fxsz; f++ )
|
2020-02-29 12:45:50 +00:00
|
|
|
{
|
2020-02-29 15:41:22 +00:00
|
|
|
const auto& frame = fexcl->data[f];
|
|
|
|
auto sym = m_data.symbolStats.find( frame.symAddr );
|
2021-11-27 16:46:43 +00:00
|
|
|
if( sym == m_data.symbolStats.end() ) sym = m_data.symbolStats.emplace( frame.symAddr, SymbolStats { 0, 0 } ).first;
|
2020-02-29 15:41:22 +00:00
|
|
|
sym->second.incl += count;
|
2020-02-27 15:48:50 +00:00
|
|
|
}
|
2020-03-28 17:04:33 +00:00
|
|
|
for( uint16_t c=1; c<framesCount; c++ )
|
2020-02-27 15:48:50 +00:00
|
|
|
{
|
2020-02-29 12:45:50 +00:00
|
|
|
const auto fincl = frames[c];
|
|
|
|
const auto fsz = fincl->size;
|
|
|
|
for( uint8_t f=0; f<fsz; f++ )
|
2020-02-27 15:48:50 +00:00
|
|
|
{
|
2020-02-29 12:45:50 +00:00
|
|
|
const auto& frame = fincl->data[f];
|
|
|
|
auto sym = m_data.symbolStats.find( frame.symAddr );
|
2021-11-27 16:46:43 +00:00
|
|
|
if( sym == m_data.symbolStats.end() ) sym = m_data.symbolStats.emplace( frame.symAddr, SymbolStats { 0, 0 } ).first;
|
2020-02-29 12:45:50 +00:00
|
|
|
sym->second.incl += count;
|
2020-02-27 15:48:50 +00:00
|
|
|
}
|
|
|
|
}
|
2020-02-29 15:24:15 +00:00
|
|
|
|
|
|
|
CallstackFrameId parentFrameId;
|
|
|
|
if( fxsz != 1 )
|
|
|
|
{
|
2021-04-08 23:15:18 +00:00
|
|
|
auto cfdata = (CallstackFrame*)alloca( uint8_t( fxsz-1 ) * sizeof( CallstackFrame ) );
|
2020-02-29 15:24:15 +00:00
|
|
|
for( int i=0; i<fxsz-1; i++ )
|
|
|
|
{
|
|
|
|
cfdata[i] = fexcl->data[i+1];
|
|
|
|
}
|
|
|
|
CallstackFrameData cfd;
|
|
|
|
cfd.data = cfdata;
|
|
|
|
cfd.size = fxsz-1;
|
|
|
|
cfd.imageName = fexcl->imageName;
|
|
|
|
|
|
|
|
auto it = m_data.revParentFrameMap.find( &cfd );
|
|
|
|
if( it == m_data.revParentFrameMap.end() )
|
|
|
|
{
|
|
|
|
auto frame = m_slab.Alloc<CallstackFrame>( fxsz-1 );
|
2020-04-14 00:22:14 +00:00
|
|
|
memcpy( frame, cfdata, ( fxsz-1 ) * sizeof( CallstackFrame ) );
|
2020-02-29 15:24:15 +00:00
|
|
|
auto frameData = m_slab.AllocInit<CallstackFrameData>();
|
|
|
|
frameData->data = frame;
|
|
|
|
frameData->size = fxsz - 1;
|
2020-03-01 00:27:21 +00:00
|
|
|
frameData->imageName = fexcl->imageName;
|
2020-02-29 15:24:15 +00:00
|
|
|
parentFrameId.idx = m_callstackParentNextIdx++;
|
|
|
|
parentFrameId.sel = 0;
|
|
|
|
parentFrameId.custom = 1;
|
2020-02-29 18:49:33 +00:00
|
|
|
m_data.parentCallstackFrameMap.emplace( parentFrameId, frameData );
|
2020-02-29 15:24:15 +00:00
|
|
|
m_data.revParentFrameMap.emplace( frameData, parentFrameId );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
parentFrameId = it->second;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-11-27 16:47:09 +00:00
|
|
|
uint32_t parentIdx;
|
2020-02-29 15:24:15 +00:00
|
|
|
{
|
2021-11-27 16:47:09 +00:00
|
|
|
const auto sz = framesCount - ( fxsz == 1 );
|
|
|
|
const auto memsize = sizeof( VarArray<CallstackFrameId> ) + sz * sizeof( CallstackFrameId );
|
|
|
|
auto mem = (char*)m_slab.AllocRaw( memsize );
|
|
|
|
|
|
|
|
auto data = (CallstackFrameId*)mem;
|
|
|
|
auto dst = data;
|
|
|
|
if( fxsz == 1 )
|
2020-02-29 15:24:15 +00:00
|
|
|
{
|
2021-11-27 16:47:09 +00:00
|
|
|
for( int i=0; i<sz; i++ )
|
|
|
|
{
|
|
|
|
*dst++ = cs[i+1];
|
|
|
|
}
|
2020-02-29 15:24:15 +00:00
|
|
|
}
|
2021-11-27 16:47:09 +00:00
|
|
|
else
|
2020-02-29 15:24:15 +00:00
|
|
|
{
|
2021-11-27 16:47:09 +00:00
|
|
|
*dst++ = parentFrameId;
|
|
|
|
for( int i=1; i<sz; i++ )
|
|
|
|
{
|
|
|
|
*dst++ = cs[i];
|
|
|
|
}
|
2020-02-29 15:24:15 +00:00
|
|
|
}
|
|
|
|
|
2021-11-27 16:47:09 +00:00
|
|
|
auto arr = (VarArray<CallstackFrameId>*)( mem + sz * sizeof( CallstackFrameId ) );
|
|
|
|
new(arr) VarArray<CallstackFrameId>( sz, data );
|
2020-02-29 15:24:15 +00:00
|
|
|
|
2021-11-27 16:47:09 +00:00
|
|
|
auto it = m_data.parentCallstackMap.find( arr );
|
|
|
|
if( it == m_data.parentCallstackMap.end() )
|
|
|
|
{
|
|
|
|
parentIdx = m_data.parentCallstackPayload.size();
|
|
|
|
m_data.parentCallstackMap.emplace( arr, parentIdx );
|
|
|
|
m_data.parentCallstackPayload.push_back( arr );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
parentIdx = it->second;
|
|
|
|
m_slab.Unalloc( memsize );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
sym0 = m_data.symbolStats.find( frame0.symAddr );
|
|
|
|
auto sit = sym0->second.parents.find( parentIdx );
|
|
|
|
if( sit == sym0->second.parents.end() )
|
2020-02-29 15:24:15 +00:00
|
|
|
{
|
2021-11-27 16:47:09 +00:00
|
|
|
sym0->second.parents.emplace( parentIdx, count );
|
2020-02-29 15:24:15 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2021-11-27 16:47:09 +00:00
|
|
|
sit->second += count;
|
2020-02-29 15:24:15 +00:00
|
|
|
}
|
2020-02-29 15:41:22 +00:00
|
|
|
|
2021-11-27 16:47:09 +00:00
|
|
|
uint32_t baseParentIdx;
|
|
|
|
{
|
|
|
|
const auto sz = framesCount - 1;
|
|
|
|
const auto memsize = sizeof( VarArray<CallstackFrameId> ) + sz * sizeof( CallstackFrameId );
|
|
|
|
auto mem = (char*)m_slab.AllocRaw( memsize );
|
|
|
|
|
|
|
|
auto data = (CallstackFrameId*)mem;
|
|
|
|
auto dst = data;
|
|
|
|
for( int i=0; i<sz; i++ )
|
|
|
|
{
|
|
|
|
*dst++ = cs[i+1];
|
|
|
|
}
|
|
|
|
|
|
|
|
auto arr = (VarArray<CallstackFrameId>*)( mem + sz * sizeof( CallstackFrameId ) );
|
|
|
|
new(arr) VarArray<CallstackFrameId>( sz, data );
|
|
|
|
|
|
|
|
auto it = m_data.parentCallstackMap.find( arr );
|
|
|
|
if( it == m_data.parentCallstackMap.end() )
|
|
|
|
{
|
|
|
|
baseParentIdx = m_data.parentCallstackPayload.size();
|
|
|
|
m_data.parentCallstackMap.emplace( arr, baseParentIdx );
|
|
|
|
m_data.parentCallstackPayload.push_back( arr );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
baseParentIdx = it->second;
|
|
|
|
m_slab.Unalloc( memsize );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
auto bit = sym0->second.baseParents.find( baseParentIdx );
|
|
|
|
if( bit == sym0->second.baseParents.end() )
|
2020-02-29 15:41:22 +00:00
|
|
|
{
|
2021-11-27 16:47:09 +00:00
|
|
|
sym0->second.baseParents.emplace( baseParentIdx, count );
|
2020-02-29 15:41:22 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2021-11-27 16:47:09 +00:00
|
|
|
bit->second += count;
|
2020-02-29 15:41:22 +00:00
|
|
|
}
|
2020-02-27 15:48:50 +00:00
|
|
|
}
|
2019-11-04 23:40:41 +00:00
|
|
|
#endif
|
|
|
|
|
2020-02-12 19:59:36 +00:00
|
|
|
int64_t Worker::ReadTimeline( FileRead& f, ZoneEvent* zone, int64_t refTime, int32_t& childIdx )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2020-01-26 15:18:16 +00:00
|
|
|
uint32_t sz;
|
2018-04-30 23:47:56 +00:00
|
|
|
f.Read( sz );
|
2020-02-12 19:59:36 +00:00
|
|
|
return ReadTimelineHaveSize( f, zone, refTime, childIdx, sz );
|
2020-02-12 01:14:21 +00:00
|
|
|
}
|
|
|
|
|
2020-02-12 19:59:36 +00:00
|
|
|
int64_t Worker::ReadTimelineHaveSize( FileRead& f, ZoneEvent* zone, int64_t refTime, int32_t& childIdx, uint32_t sz )
|
2020-02-12 01:14:21 +00:00
|
|
|
{
|
2018-07-22 14:05:50 +00:00
|
|
|
if( sz == 0 )
|
|
|
|
{
|
2019-09-30 23:05:37 +00:00
|
|
|
zone->SetChild( -1 );
|
2020-02-12 19:59:36 +00:00
|
|
|
return refTime;
|
2018-07-22 14:05:50 +00:00
|
|
|
}
|
|
|
|
else
|
2018-03-15 21:54:10 +00:00
|
|
|
{
|
2019-11-02 11:38:07 +00:00
|
|
|
const auto idx = childIdx;
|
|
|
|
childIdx++;
|
|
|
|
zone->SetChild( idx );
|
2020-02-12 19:59:36 +00:00
|
|
|
return ReadTimeline( f, m_data.zoneChildren[idx], sz, refTime, childIdx );
|
2018-03-15 21:54:10 +00:00
|
|
|
}
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2019-11-02 11:38:07 +00:00
|
|
|
void Worker::ReadTimeline( FileRead& f, GpuEvent* zone, int64_t& refTime, int64_t& refGpuTime, int32_t& childIdx )
|
2018-03-15 21:54:10 +00:00
|
|
|
{
|
|
|
|
uint64_t sz;
|
2018-04-30 23:47:56 +00:00
|
|
|
f.Read( sz );
|
2020-02-12 01:14:21 +00:00
|
|
|
ReadTimelineHaveSize( f, zone, refTime, refGpuTime, childIdx, sz );
|
|
|
|
}
|
|
|
|
|
|
|
|
void Worker::ReadTimelineHaveSize( FileRead& f, GpuEvent* zone, int64_t& refTime, int64_t& refGpuTime, int32_t& childIdx, uint64_t sz )
|
|
|
|
{
|
2018-07-22 17:47:01 +00:00
|
|
|
if( sz == 0 )
|
2018-03-15 21:54:10 +00:00
|
|
|
{
|
2019-10-30 22:50:37 +00:00
|
|
|
zone->SetChild( -1 );
|
2018-07-22 17:47:01 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2019-11-02 11:38:07 +00:00
|
|
|
const auto idx = childIdx;
|
|
|
|
childIdx++;
|
|
|
|
zone->SetChild( idx );
|
|
|
|
ReadTimeline( f, m_data.gpuChildren[idx], sz, refTime, refGpuTime, childIdx );
|
2018-03-15 21:54:10 +00:00
|
|
|
}
|
|
|
|
}
|
2018-03-15 20:27:36 +00:00
|
|
|
|
2018-06-21 23:30:08 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
2021-11-14 15:42:11 +00:00
|
|
|
void Worker::ReconstructZoneStatistics( uint8_t* countMap, ZoneEvent& zone, uint16_t thread )
|
2019-11-08 22:53:43 +00:00
|
|
|
{
|
2020-01-23 18:03:03 +00:00
|
|
|
assert( zone.IsEndValid() );
|
|
|
|
auto timeSpan = zone.End() - zone.Start();
|
|
|
|
if( timeSpan > 0 )
|
2018-06-21 23:30:08 +00:00
|
|
|
{
|
2020-01-23 18:03:03 +00:00
|
|
|
auto it = m_data.sourceLocationZones.find( zone.SrcLoc() );
|
|
|
|
assert( it != m_data.sourceLocationZones.end() );
|
2021-02-07 18:35:53 +00:00
|
|
|
ZoneThreadData ztd;
|
2020-01-23 18:03:03 +00:00
|
|
|
ztd.SetZone( &zone );
|
|
|
|
ztd.SetThread( thread );
|
2021-02-07 18:35:53 +00:00
|
|
|
auto& slz = it->second;
|
|
|
|
slz.zones.push_back( ztd );
|
2020-01-23 18:03:03 +00:00
|
|
|
if( slz.min > timeSpan ) slz.min = timeSpan;
|
|
|
|
if( slz.max < timeSpan ) slz.max = timeSpan;
|
|
|
|
slz.total += timeSpan;
|
|
|
|
slz.sumSq += double( timeSpan ) * timeSpan;
|
2021-11-14 15:42:11 +00:00
|
|
|
if( countMap[uint16_t(zone.SrcLoc())] == 0 )
|
2021-06-05 18:28:16 +00:00
|
|
|
{
|
|
|
|
slz.nonReentrantCount++;
|
|
|
|
if( slz.nonReentrantMin > timeSpan ) slz.nonReentrantMin = timeSpan;
|
|
|
|
if( slz.nonReentrantMax < timeSpan ) slz.nonReentrantMax = timeSpan;
|
|
|
|
slz.nonReentrantTotal += timeSpan;
|
|
|
|
}
|
2020-01-24 01:17:38 +00:00
|
|
|
if( zone.HasChildren() )
|
2018-06-21 23:30:08 +00:00
|
|
|
{
|
2020-01-23 18:03:03 +00:00
|
|
|
auto& children = GetZoneChildren( zone.Child() );
|
|
|
|
assert( children.is_magic() );
|
|
|
|
auto& c = *(Vector<ZoneEvent>*)( &children );
|
|
|
|
for( auto& v : c )
|
2018-06-21 23:30:08 +00:00
|
|
|
{
|
2020-01-23 18:03:03 +00:00
|
|
|
const auto childSpan = std::max( int64_t( 0 ), v.End() - v.Start() );
|
|
|
|
timeSpan -= childSpan;
|
2018-06-21 23:30:08 +00:00
|
|
|
}
|
|
|
|
}
|
2020-01-23 18:03:03 +00:00
|
|
|
if( slz.selfMin > timeSpan ) slz.selfMin = timeSpan;
|
|
|
|
if( slz.selfMax < timeSpan ) slz.selfMax = timeSpan;
|
|
|
|
slz.selfTotal += timeSpan;
|
2018-06-21 23:30:08 +00:00
|
|
|
}
|
2019-11-08 22:53:43 +00:00
|
|
|
}
|
2022-01-28 23:48:01 +00:00
|
|
|
|
|
|
|
void Worker::ReconstructZoneStatistics( GpuEvent& zone, uint16_t thread )
|
|
|
|
{
|
|
|
|
assert( zone.GpuEnd() >= 0 );
|
|
|
|
auto timeSpan = zone.GpuEnd() - zone.GpuStart();
|
|
|
|
if( timeSpan > 0 )
|
|
|
|
{
|
|
|
|
auto it = m_data.gpuSourceLocationZones.find( zone.SrcLoc() );
|
|
|
|
if( it == m_data.gpuSourceLocationZones.end() )
|
|
|
|
{
|
|
|
|
it = m_data.gpuSourceLocationZones.emplace( zone.SrcLoc(), GpuSourceLocationZones {} ).first;
|
|
|
|
}
|
|
|
|
GpuZoneThreadData ztd;
|
|
|
|
ztd.SetZone( &zone );
|
|
|
|
ztd.SetThread( thread );
|
|
|
|
auto& slz = it->second;
|
|
|
|
slz.zones.push_back( ztd );
|
|
|
|
if( slz.min > timeSpan ) slz.min = timeSpan;
|
|
|
|
if( slz.max < timeSpan ) slz.max = timeSpan;
|
|
|
|
slz.total += timeSpan;
|
|
|
|
slz.sumSq += double( timeSpan ) * timeSpan;
|
|
|
|
}
|
|
|
|
}
|
2018-07-29 12:16:13 +00:00
|
|
|
#else
|
2019-11-08 22:53:43 +00:00
|
|
|
void Worker::CountZoneStatistics( ZoneEvent* zone )
|
|
|
|
{
|
2019-11-08 22:59:20 +00:00
|
|
|
auto cnt = GetSourceLocationZonesCnt( zone->SrcLoc() );
|
|
|
|
(*cnt)++;
|
2018-06-21 23:30:08 +00:00
|
|
|
}
|
2022-01-23 14:47:03 +00:00
|
|
|
|
|
|
|
void Worker::CountZoneStatistics( GpuEvent* zone )
|
|
|
|
{
|
|
|
|
auto cnt = GetGpuSourceLocationZonesCnt( zone->SrcLoc() );
|
|
|
|
(*cnt)++;
|
|
|
|
}
|
2019-11-08 22:53:43 +00:00
|
|
|
#endif
|
2018-06-21 23:30:08 +00:00
|
|
|
|
2020-02-12 19:59:36 +00:00
|
|
|
int64_t Worker::ReadTimeline( FileRead& f, Vector<short_ptr<ZoneEvent>>& _vec, uint32_t size, int64_t refTime, int32_t& childIdx )
|
2018-03-15 21:54:10 +00:00
|
|
|
{
|
|
|
|
assert( size != 0 );
|
2020-02-12 19:03:14 +00:00
|
|
|
const auto lp = s_loadProgress.subProgress.load( std::memory_order_relaxed );
|
|
|
|
s_loadProgress.subProgress.store( lp + size, std::memory_order_relaxed );
|
2019-11-09 22:34:05 +00:00
|
|
|
auto& vec = *(Vector<ZoneEvent>*)( &_vec );
|
|
|
|
vec.set_magic();
|
2019-02-15 00:58:23 +00:00
|
|
|
vec.reserve_exact( size, m_slab );
|
2019-11-09 22:34:05 +00:00
|
|
|
auto zone = vec.begin();
|
2020-02-13 17:11:54 +00:00
|
|
|
auto end = vec.end() - 1;
|
|
|
|
|
|
|
|
int16_t srcloc;
|
|
|
|
int64_t tstart, tend;
|
|
|
|
uint32_t childSz, extra;
|
|
|
|
f.Read4( srcloc, tstart, extra, childSz );
|
|
|
|
|
|
|
|
while( zone != end )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2020-02-11 23:36:42 +00:00
|
|
|
refTime += tstart;
|
2020-02-12 19:16:14 +00:00
|
|
|
zone->SetStartSrcLoc( refTime, srcloc );
|
2020-02-13 17:11:54 +00:00
|
|
|
zone->extra = extra;
|
2020-02-12 19:59:36 +00:00
|
|
|
refTime = ReadTimelineHaveSize( f, zone, refTime, childIdx, childSz );
|
2020-02-13 17:11:54 +00:00
|
|
|
f.Read5( tend, srcloc, tstart, extra, childSz );
|
2020-02-12 19:59:36 +00:00
|
|
|
refTime += tend;
|
|
|
|
zone->SetEnd( refTime );
|
2019-03-13 00:46:05 +00:00
|
|
|
#ifdef TRACY_NO_STATISTICS
|
2019-11-08 22:53:43 +00:00
|
|
|
CountZoneStatistics( zone );
|
2019-03-13 00:46:05 +00:00
|
|
|
#endif
|
2020-02-13 17:11:54 +00:00
|
|
|
zone++;
|
2018-06-21 23:30:08 +00:00
|
|
|
}
|
2020-02-13 17:11:54 +00:00
|
|
|
|
|
|
|
refTime += tstart;
|
|
|
|
zone->SetStartSrcLoc( refTime, srcloc );
|
|
|
|
zone->extra = extra;
|
|
|
|
refTime = ReadTimelineHaveSize( f, zone, refTime, childIdx, childSz );
|
|
|
|
f.Read( tend );
|
|
|
|
refTime += tend;
|
|
|
|
zone->SetEnd( refTime );
|
|
|
|
#ifdef TRACY_NO_STATISTICS
|
|
|
|
CountZoneStatistics( zone );
|
|
|
|
#endif
|
|
|
|
|
2020-02-12 19:59:36 +00:00
|
|
|
return refTime;
|
2018-06-21 23:30:08 +00:00
|
|
|
}
|
2018-03-18 22:37:07 +00:00
|
|
|
|
2019-11-10 00:36:13 +00:00
|
|
|
void Worker::ReadTimeline( FileRead& f, Vector<short_ptr<GpuEvent>>& _vec, uint64_t size, int64_t& refTime, int64_t& refGpuTime, int32_t& childIdx )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2018-03-15 21:54:10 +00:00
|
|
|
assert( size != 0 );
|
2020-02-12 19:03:14 +00:00
|
|
|
const auto lp = s_loadProgress.subProgress.load( std::memory_order_relaxed );
|
|
|
|
s_loadProgress.subProgress.store( lp + size, std::memory_order_relaxed );
|
2019-11-10 00:36:13 +00:00
|
|
|
auto& vec = *(Vector<GpuEvent>*)( &_vec );
|
|
|
|
vec.set_magic();
|
2019-02-15 00:58:23 +00:00
|
|
|
vec.reserve_exact( size, m_slab );
|
2019-11-10 00:36:13 +00:00
|
|
|
auto zone = vec.begin();
|
|
|
|
auto end = vec.end();
|
2019-02-16 19:53:07 +00:00
|
|
|
do
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2019-10-30 22:50:37 +00:00
|
|
|
int64_t tcpu, tgpu;
|
2019-10-13 12:36:59 +00:00
|
|
|
int16_t srcloc;
|
|
|
|
uint16_t thread;
|
2020-02-12 01:14:21 +00:00
|
|
|
uint64_t childSz;
|
|
|
|
f.Read6( tcpu, tgpu, srcloc, zone->callstack, thread, childSz );
|
2020-02-11 23:34:09 +00:00
|
|
|
zone->SetSrcLoc( srcloc );
|
2019-10-13 12:36:59 +00:00
|
|
|
zone->SetThread( thread );
|
2019-10-30 22:50:37 +00:00
|
|
|
refTime += tcpu;
|
|
|
|
refGpuTime += tgpu;
|
2019-10-13 12:36:59 +00:00
|
|
|
zone->SetCpuStart( refTime );
|
2019-10-30 22:50:37 +00:00
|
|
|
zone->SetGpuStart( refGpuTime );
|
2018-12-30 22:06:03 +00:00
|
|
|
|
2020-02-12 01:14:21 +00:00
|
|
|
ReadTimelineHaveSize( f, zone, refTime, refGpuTime, childIdx, childSz );
|
2018-12-30 22:06:03 +00:00
|
|
|
|
2019-10-30 22:50:37 +00:00
|
|
|
f.Read2( tcpu, tgpu );
|
|
|
|
refTime += tcpu;
|
|
|
|
refGpuTime += tgpu;
|
|
|
|
zone->SetCpuEnd( refTime );
|
|
|
|
zone->SetGpuEnd( refGpuTime );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
2019-11-10 00:36:13 +00:00
|
|
|
while( ++zone != end );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2019-08-01 21:14:09 +00:00
|
|
|
void Worker::Disconnect()
|
|
|
|
{
|
2021-12-11 12:13:57 +00:00
|
|
|
//Query( ServerQueryDisconnect, 0 );
|
|
|
|
Shutdown();
|
2019-08-01 21:14:09 +00:00
|
|
|
m_disconnect = true;
|
|
|
|
}
|
|
|
|
|
2021-06-04 11:38:45 +00:00
|
|
|
static void WriteHwSampleVec( FileWrite& f, SortedVector<Int48, Int48Sort>& vec )
|
|
|
|
{
|
|
|
|
uint64_t sz = vec.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
if( sz != 0 )
|
|
|
|
{
|
|
|
|
if( !vec.is_sorted() ) vec.sort();
|
|
|
|
int64_t refTime = 0;
|
|
|
|
for( auto& v : vec )
|
|
|
|
{
|
|
|
|
WriteTimeOffset( f, refTime, v.Val() );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-05-15 13:50:20 +00:00
|
|
|
void Worker::Write( FileWrite& f, bool fiDict )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2021-11-14 12:01:27 +00:00
|
|
|
DoPostponedWorkAll();
|
2021-02-07 17:29:29 +00:00
|
|
|
|
2018-04-21 11:45:48 +00:00
|
|
|
f.Write( FileHeader, sizeof( FileHeader ) );
|
|
|
|
|
2018-02-13 13:57:47 +00:00
|
|
|
f.Write( &m_delay, sizeof( m_delay ) );
|
|
|
|
f.Write( &m_resolution, sizeof( m_resolution ) );
|
|
|
|
f.Write( &m_timerMul, sizeof( m_timerMul ) );
|
|
|
|
f.Write( &m_data.lastTime, sizeof( m_data.lastTime ) );
|
2018-07-10 20:56:41 +00:00
|
|
|
f.Write( &m_data.frameOffset, sizeof( m_data.frameOffset ) );
|
2019-08-17 20:19:04 +00:00
|
|
|
f.Write( &m_pid, sizeof( m_pid ) );
|
2020-02-25 22:46:16 +00:00
|
|
|
f.Write( &m_samplingPeriod, sizeof( m_samplingPeriod ) );
|
2020-03-25 20:48:24 +00:00
|
|
|
f.Write( &m_data.cpuArch, sizeof( m_data.cpuArch ) );
|
2020-05-06 16:59:54 +00:00
|
|
|
f.Write( &m_data.cpuId, sizeof( m_data.cpuId ) );
|
|
|
|
f.Write( m_data.cpuManufacturer, 12 );
|
2018-02-13 13:57:47 +00:00
|
|
|
|
|
|
|
uint64_t sz = m_captureName.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
f.Write( m_captureName.c_str(), sz );
|
|
|
|
|
2018-08-28 22:57:11 +00:00
|
|
|
sz = m_captureProgram.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
f.Write( m_captureProgram.c_str(), sz );
|
|
|
|
|
|
|
|
f.Write( &m_captureTime, sizeof( m_captureTime ) );
|
2021-01-31 16:51:16 +00:00
|
|
|
f.Write( &m_executableTime, sizeof( m_executableTime ) );
|
2018-08-28 22:57:11 +00:00
|
|
|
|
2018-08-19 16:28:48 +00:00
|
|
|
sz = m_hostInfo.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
f.Write( m_hostInfo.c_str(), sz );
|
|
|
|
|
2019-11-29 21:41:41 +00:00
|
|
|
sz = m_data.cpuTopology.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& package : m_data.cpuTopology )
|
|
|
|
{
|
|
|
|
sz = package.second.size();
|
|
|
|
f.Write( &package.first, sizeof( package.first ) );
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& core : package.second )
|
|
|
|
{
|
|
|
|
sz = core.second.size();
|
|
|
|
f.Write( &core.first, sizeof( core.first ) );
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& thread : core.second )
|
|
|
|
{
|
|
|
|
f.Write( &thread, sizeof( thread ) );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-03-26 20:41:44 +00:00
|
|
|
f.Write( &m_data.crashEvent, sizeof( m_data.crashEvent ) );
|
2018-08-20 00:27:24 +00:00
|
|
|
|
2018-08-04 17:47:09 +00:00
|
|
|
sz = m_data.frames.Data().size();
|
2018-02-13 13:57:47 +00:00
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2018-08-04 17:47:09 +00:00
|
|
|
for( auto& fd : m_data.frames.Data() )
|
|
|
|
{
|
2018-12-30 22:06:03 +00:00
|
|
|
int64_t refTime = 0;
|
2018-08-04 17:47:09 +00:00
|
|
|
f.Write( &fd->name, sizeof( fd->name ) );
|
2018-08-05 00:09:59 +00:00
|
|
|
f.Write( &fd->continuous, sizeof( fd->continuous ) );
|
2018-08-04 17:47:09 +00:00
|
|
|
sz = fd->frames.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2018-08-05 00:09:59 +00:00
|
|
|
if( fd->continuous )
|
|
|
|
{
|
|
|
|
for( auto& fe : fd->frames )
|
|
|
|
{
|
2018-12-30 22:06:03 +00:00
|
|
|
WriteTimeOffset( f, refTime, fe.start );
|
2019-06-06 21:08:19 +00:00
|
|
|
f.Write( &fe.frameImage, sizeof( fe.frameImage ) );
|
2018-08-05 00:09:59 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2018-12-30 22:06:03 +00:00
|
|
|
for( auto& fe : fd->frames )
|
|
|
|
{
|
|
|
|
WriteTimeOffset( f, refTime, fe.start );
|
|
|
|
WriteTimeOffset( f, refTime, fe.end );
|
2019-06-06 21:08:19 +00:00
|
|
|
f.Write( &fe.frameImage, sizeof( fe.frameImage ) );
|
2018-12-30 22:06:03 +00:00
|
|
|
}
|
2018-08-05 00:09:59 +00:00
|
|
|
}
|
2018-08-04 17:47:09 +00:00
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
|
|
|
sz = m_data.stringData.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.stringData )
|
|
|
|
{
|
|
|
|
uint64_t ptr = (uint64_t)v;
|
|
|
|
f.Write( &ptr, sizeof( ptr ) );
|
|
|
|
sz = strlen( v );
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
f.Write( v, sz );
|
|
|
|
}
|
|
|
|
|
|
|
|
sz = m_data.strings.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.strings )
|
|
|
|
{
|
|
|
|
f.Write( &v.first, sizeof( v.first ) );
|
|
|
|
uint64_t ptr = (uint64_t)v.second;
|
|
|
|
f.Write( &ptr, sizeof( ptr ) );
|
|
|
|
}
|
|
|
|
|
|
|
|
sz = m_data.threadNames.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.threadNames )
|
|
|
|
{
|
|
|
|
f.Write( &v.first, sizeof( v.first ) );
|
|
|
|
uint64_t ptr = (uint64_t)v.second;
|
|
|
|
f.Write( &ptr, sizeof( ptr ) );
|
|
|
|
}
|
|
|
|
|
2019-08-16 17:24:38 +00:00
|
|
|
sz = m_data.externalNames.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.externalNames )
|
|
|
|
{
|
|
|
|
f.Write( &v.first, sizeof( v.first ) );
|
2019-08-16 17:49:16 +00:00
|
|
|
uint64_t ptr = (uint64_t)v.second.first;
|
|
|
|
f.Write( &ptr, sizeof( ptr ) );
|
|
|
|
ptr = (uint64_t)v.second.second;
|
2019-08-16 17:24:38 +00:00
|
|
|
f.Write( &ptr, sizeof( ptr ) );
|
|
|
|
}
|
|
|
|
|
2019-08-19 20:56:02 +00:00
|
|
|
m_data.localThreadCompress.Save( f );
|
2019-08-19 21:09:58 +00:00
|
|
|
m_data.externalThreadCompress.Save( f );
|
2018-07-29 13:19:44 +00:00
|
|
|
|
2018-02-13 13:57:47 +00:00
|
|
|
sz = m_data.sourceLocation.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.sourceLocation )
|
|
|
|
{
|
|
|
|
f.Write( &v.first, sizeof( v.first ) );
|
2019-11-01 19:17:25 +00:00
|
|
|
f.Write( &v.second, sizeof( SourceLocationBase ) );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
sz = m_data.sourceLocationExpand.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.sourceLocationExpand )
|
|
|
|
{
|
|
|
|
f.Write( &v, sizeof( v ) );
|
|
|
|
}
|
|
|
|
|
|
|
|
sz = m_data.sourceLocationPayload.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.sourceLocationPayload )
|
|
|
|
{
|
2019-11-02 13:28:59 +00:00
|
|
|
f.Write( v, sizeof( SourceLocationBase ) );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2018-07-29 12:58:01 +00:00
|
|
|
#ifndef TRACY_NO_STATISTICS
|
|
|
|
sz = m_data.sourceLocationZones.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.sourceLocationZones )
|
|
|
|
{
|
2019-08-15 15:42:26 +00:00
|
|
|
int16_t id = v.first;
|
2018-07-29 12:58:01 +00:00
|
|
|
uint64_t cnt = v.second.zones.size();
|
|
|
|
f.Write( &id, sizeof( id ) );
|
|
|
|
f.Write( &cnt, sizeof( cnt ) );
|
|
|
|
}
|
2022-01-29 14:14:43 +00:00
|
|
|
|
|
|
|
sz = m_data.gpuSourceLocationZones.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.gpuSourceLocationZones )
|
|
|
|
{
|
|
|
|
int16_t id = v.first;
|
|
|
|
uint64_t cnt = v.second.zones.size();
|
|
|
|
f.Write( &id, sizeof( id ) );
|
|
|
|
f.Write( &cnt, sizeof( cnt ) );
|
|
|
|
}
|
2018-07-29 12:58:01 +00:00
|
|
|
#else
|
|
|
|
sz = m_data.sourceLocationZonesCnt.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.sourceLocationZonesCnt )
|
|
|
|
{
|
2019-08-15 15:42:26 +00:00
|
|
|
int16_t id = v.first;
|
2018-07-29 12:58:01 +00:00
|
|
|
uint64_t cnt = v.second;
|
|
|
|
f.Write( &id, sizeof( id ) );
|
|
|
|
f.Write( &cnt, sizeof( cnt ) );
|
|
|
|
}
|
2022-01-29 14:14:43 +00:00
|
|
|
|
|
|
|
sz = m_data.gpuSourceLocationZonesCnt.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.gpuSourceLocationZonesCnt )
|
|
|
|
{
|
|
|
|
int16_t id = v.first;
|
|
|
|
uint64_t cnt = v.second;
|
|
|
|
f.Write( &id, sizeof( id ) );
|
|
|
|
f.Write( &cnt, sizeof( cnt ) );
|
|
|
|
}
|
2018-07-29 12:58:01 +00:00
|
|
|
#endif
|
|
|
|
|
2018-02-13 13:57:47 +00:00
|
|
|
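// Lock map: per-lock metadata, the participating thread list, and the lock event timeline stored relative to the announce time.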
sz = m_data.lockMap.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.lockMap )
|
|
|
|
{
|
|
|
|
f.Write( &v.first, sizeof( v.first ) );
|
2020-03-08 12:47:38 +00:00
|
|
|
f.Write( &v.second->customName, sizeof( v.second->customName ) );
|
2019-03-16 01:09:50 +00:00
|
|
|
f.Write( &v.second->srcloc, sizeof( v.second->srcloc ) );
|
|
|
|
f.Write( &v.second->type, sizeof( v.second->type ) );
|
|
|
|
f.Write( &v.second->valid, sizeof( v.second->valid ) );
|
|
|
|
f.Write( &v.second->timeAnnounce, sizeof( v.second->timeAnnounce ) );
|
|
|
|
f.Write( &v.second->timeTerminate, sizeof( v.second->timeTerminate ) );
|
|
|
|
sz = v.second->threadList.size();
|
2018-02-13 13:57:47 +00:00
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2019-03-16 01:09:50 +00:00
|
|
|
for( auto& t : v.second->threadList )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
f.Write( &t, sizeof( t ) );
|
|
|
|
}
|
2019-03-16 01:09:50 +00:00
|
|
|
int64_t refTime = v.second->timeAnnounce;
|
|
|
|
sz = v.second->timeline.size();
|
2018-02-13 13:57:47 +00:00
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2019-03-16 01:09:50 +00:00
|
|
|
for( auto& lev : v.second->timeline )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2019-08-15 18:39:16 +00:00
|
|
|
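// WriteTimeOffset is assumed to write the timestamp as a delta against refTime and then advance refTime, so successive events are stored as small offsets.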
WriteTimeOffset( f, refTime, lev.ptr->Time() );
|
|
|
|
const int16_t srcloc = lev.ptr->SrcLoc();
|
|
|
|
f.Write( &srcloc, sizeof( srcloc ) );
|
2019-03-16 13:18:43 +00:00
|
|
|
f.Write( &lev.ptr->thread, sizeof( lev.ptr->thread ) );
|
|
|
|
f.Write( &lev.ptr->type, sizeof( lev.ptr->type ) );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
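// Messages: pointer id, delta-encoded timestamp, string reference, color and callstack.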
{
|
2018-12-30 22:06:03 +00:00
|
|
|
int64_t refTime = 0;
|
|
|
|
sz = m_data.messages.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.messages )
|
|
|
|
{
|
2019-11-02 15:32:42 +00:00
|
|
|
const auto ptr = (uint64_t)(MessageData*)v;
|
2018-12-30 22:06:03 +00:00
|
|
|
f.Write( &ptr, sizeof( ptr ) );
|
|
|
|
WriteTimeOffset( f, refTime, v->time );
|
|
|
|
f.Write( &v->ref, sizeof( v->ref ) );
|
2019-05-10 18:32:47 +00:00
|
|
|
f.Write( &v->color, sizeof( v->color ) );
|
2019-11-14 23:42:44 +00:00
|
|
|
f.Write( &v->callstack, sizeof( v->callstack ) );
|
2018-12-30 22:06:03 +00:00
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2020-01-26 14:57:55 +00:00
|
|
|
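// Zone extra data is a flat array, written in a single block.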
sz = m_data.zoneExtra.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
f.Write( m_data.zoneExtra.data(), sz * sizeof( ZoneExtra ) );
|
|
|
|
|
2019-03-27 00:46:54 +00:00
|
|
|
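// CPU zones: total zone count and child-vector count first, then per-thread data (timeline, messages, context switch samples, call stack samples).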
sz = 0;
|
|
|
|
for( auto& v : m_data.threads ) sz += v->count;
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2019-11-02 11:38:07 +00:00
|
|
|
sz = m_data.zoneChildren.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2018-02-13 13:57:47 +00:00
|
|
|
sz = m_data.threads.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& thread : m_data.threads )
|
|
|
|
{
|
2018-12-30 22:06:03 +00:00
|
|
|
int64_t refTime = 0;
|
2018-02-13 13:57:47 +00:00
|
|
|
f.Write( &thread->id, sizeof( thread->id ) );
|
|
|
|
f.Write( &thread->count, sizeof( thread->count ) );
|
2021-06-16 23:47:19 +00:00
|
|
|
f.Write( &thread->kernelSampleCnt, sizeof( thread->kernelSampleCnt ) );
|
2021-11-02 00:46:29 +00:00
|
|
|
f.Write( &thread->isFiber, sizeof( thread->isFiber ) );
|
2018-12-30 22:06:03 +00:00
|
|
|
WriteTimeline( f, thread->timeline, refTime );
|
2018-02-13 13:57:47 +00:00
|
|
|
sz = thread->messages.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : thread->messages )
|
|
|
|
{
|
2019-11-02 15:17:20 +00:00
|
|
|
auto ptr = uint64_t( (MessageData*)v );
|
2018-02-13 13:57:47 +00:00
|
|
|
f.Write( &ptr, sizeof( ptr ) );
|
|
|
|
}
|
2021-12-21 14:24:11 +00:00
|
|
|
sz = thread->ctxSwitchSamples.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
refTime = 0;
|
|
|
|
for( auto& v : thread->ctxSwitchSamples )
|
|
|
|
{
|
|
|
|
WriteTimeOffset( f, refTime, v.time.Val() );
|
|
|
|
f.Write( &v.callstack, sizeof( v.callstack ) );
|
|
|
|
}
|
2021-11-13 02:23:43 +00:00
|
|
|
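// Sampling data may have arrived out of order; sort it by timestamp before saving so the stored stream is monotonic.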
if( m_inconsistentSamples )
|
|
|
|
{
|
|
|
|
#ifdef NO_PARALLEL_SORT
|
|
|
|
pdqsort_branchless( thread->samples.begin(), thread->samples.end(), [] ( const auto& lhs, const auto& rhs ) { return lhs.time.Val() < rhs.time.Val(); } );
|
|
|
|
#else
|
|
|
|
std::sort( std::execution::par_unseq, thread->samples.begin(), thread->samples.end(), [] ( const auto& lhs, const auto& rhs ) { return lhs.time.Val() < rhs.time.Val(); } );
|
|
|
|
#endif
|
|
|
|
}
|
2020-02-22 16:13:53 +00:00
|
|
|
sz = thread->samples.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
refTime = 0;
|
|
|
|
for( auto& v : thread->samples )
|
|
|
|
{
|
|
|
|
WriteTimeOffset( f, refTime, v.time.Val() );
|
|
|
|
f.Write( &v.callstack, sizeof( v.callstack ) );
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2019-03-27 00:46:54 +00:00
|
|
|
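// GPU zones: per-context metadata (thread, calibration, period, type, name, overflow), then per-thread GPU timelines with separate CPU and GPU reference times.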
sz = 0;
|
|
|
|
for( auto& v : m_data.gpuData ) sz += v->count;
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2019-11-02 11:38:07 +00:00
|
|
|
sz = m_data.gpuChildren.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2018-02-13 13:57:47 +00:00
|
|
|
sz = m_data.gpuData.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& ctx : m_data.gpuData )
|
|
|
|
{
|
|
|
|
f.Write( &ctx->thread, sizeof( ctx->thread ) );
|
2020-07-07 19:19:33 +00:00
|
|
|
uint8_t calibration = ctx->hasCalibration;
|
|
|
|
f.Write( &calibration, sizeof( calibration ) );
|
2018-02-13 13:57:47 +00:00
|
|
|
f.Write( &ctx->count, sizeof( ctx->count ) );
|
2018-06-17 16:27:16 +00:00
|
|
|
f.Write( &ctx->period, sizeof( ctx->period ) );
|
2020-05-27 16:16:53 +00:00
|
|
|
f.Write( &ctx->type, sizeof( ctx->type ) );
|
2021-01-31 17:56:12 +00:00
|
|
|
f.Write( &ctx->name, sizeof( ctx->name ) );
|
2021-06-09 19:12:50 +00:00
|
|
|
f.Write( &ctx->overflow, sizeof( ctx->overflow ) );
|
2019-09-23 15:27:49 +00:00
|
|
|
sz = ctx->threadData.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& td : ctx->threadData )
|
|
|
|
{
|
|
|
|
int64_t refTime = 0;
|
|
|
|
int64_t refGpuTime = 0;
|
|
|
|
uint64_t tid = td.first;
|
|
|
|
f.Write( &tid, sizeof( tid ) );
|
|
|
|
WriteTimeline( f, td.second.timeline, refTime, refGpuTime );
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2018-08-04 14:33:03 +00:00
|
|
|
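// Plots: memory plots are skipped here; they can be reconstructed from the memory events on load.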
sz = m_data.plots.Data().size();
|
2019-02-21 21:53:26 +00:00
|
|
|
for( auto& plot : m_data.plots.Data() ) { if( plot->type == PlotType::Memory ) sz--; }
|
2018-02-13 13:57:47 +00:00
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2018-08-04 14:33:03 +00:00
|
|
|
for( auto& plot : m_data.plots.Data() )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2019-02-21 21:53:26 +00:00
|
|
|
if( plot->type == PlotType::Memory ) continue;
|
|
|
|
f.Write( &plot->type, sizeof( plot->type ) );
|
2019-11-05 17:02:08 +00:00
|
|
|
f.Write( &plot->format, sizeof( plot->format ) );
|
2018-02-13 13:57:47 +00:00
|
|
|
f.Write( &plot->name, sizeof( plot->name ) );
|
|
|
|
f.Write( &plot->min, sizeof( plot->min ) );
|
|
|
|
f.Write( &plot->max, sizeof( plot->max ) );
|
2021-10-17 11:14:44 +00:00
|
|
|
f.Write( &plot->sum, sizeof( plot->sum ) );
|
2018-12-30 22:06:03 +00:00
|
|
|
int64_t refTime = 0;
|
2018-02-13 13:57:47 +00:00
|
|
|
sz = plot->data.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2018-12-30 22:06:03 +00:00
|
|
|
for( auto& v : plot->data )
|
|
|
|
{
|
2019-11-03 13:50:11 +00:00
|
|
|
WriteTimeOffset( f, refTime, v.time.Val() );
|
2018-12-30 22:06:03 +00:00
|
|
|
f.Write( &v.val, sizeof( v.val ) );
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
2018-04-02 00:05:16 +00:00
|
|
|
|
2020-09-25 14:39:00 +00:00
|
|
|
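// Memory data: one block per named memory pool; allocation times are delta-encoded and the free time is stored relative to the allocation (or kept as-is when negative, i.e. not yet freed).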
sz = m_data.memNameMap.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
sz = 0;
|
|
|
|
for( auto& memory : m_data.memNameMap )
|
|
|
|
{
|
|
|
|
sz += memory.second->data.size();
|
|
|
|
}
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& memory : m_data.memNameMap )
|
2018-04-02 00:05:16 +00:00
|
|
|
{
|
2020-09-25 14:39:00 +00:00
|
|
|
uint64_t name = memory.first;
|
|
|
|
f.Write( &name, sizeof( name ) );
|
|
|
|
|
|
|
|
auto& memdata = *memory.second;
|
2018-12-30 22:06:03 +00:00
|
|
|
int64_t refTime = 0;
|
2020-09-23 13:53:17 +00:00
|
|
|
sz = memdata.data.size();
|
2018-12-30 22:06:03 +00:00
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2020-09-23 13:53:17 +00:00
|
|
|
sz = memdata.active.size();
|
2018-12-30 22:06:03 +00:00
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2020-09-23 13:53:17 +00:00
|
|
|
sz = memdata.frees.size();
|
2018-12-30 22:06:03 +00:00
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2020-09-23 13:53:17 +00:00
|
|
|
for( auto& mem : memdata.data )
|
2018-12-30 22:06:03 +00:00
|
|
|
{
|
2019-10-30 21:01:13 +00:00
|
|
|
const auto ptr = mem.Ptr();
|
|
|
|
const auto size = mem.Size();
|
|
|
|
const Int24 csAlloc = mem.CsAlloc();
|
|
|
|
f.Write( &ptr, sizeof( ptr ) );
|
|
|
|
f.Write( &size, sizeof( size ) );
|
|
|
|
f.Write( &csAlloc, sizeof( csAlloc ) );
|
2018-12-30 22:06:03 +00:00
|
|
|
f.Write( &mem.csFree, sizeof( mem.csFree ) );
|
2019-08-15 20:56:55 +00:00
|
|
|
|
|
|
|
int64_t timeAlloc = mem.TimeAlloc();
|
|
|
|
uint16_t threadAlloc = mem.ThreadAlloc();
|
|
|
|
int64_t timeFree = mem.TimeFree();
|
|
|
|
uint16_t threadFree = mem.ThreadFree();
|
|
|
|
WriteTimeOffset( f, refTime, timeAlloc );
|
|
|
|
int64_t freeOffset = timeFree < 0 ? timeFree : timeFree - timeAlloc;
|
|
|
|
f.Write( &freeOffset, sizeof( freeOffset ) );
|
|
|
|
f.Write( &threadAlloc, sizeof( threadAlloc ) );
|
|
|
|
f.Write( &threadFree, sizeof( threadFree ) );
|
2018-12-30 22:06:03 +00:00
|
|
|
}
|
2020-09-23 13:53:17 +00:00
|
|
|
f.Write( &memdata.high, sizeof( memdata.high ) );
|
|
|
|
f.Write( &memdata.low, sizeof( memdata.low ) );
|
|
|
|
f.Write( &memdata.usage, sizeof( memdata.usage ) );
|
2020-09-25 14:39:00 +00:00
|
|
|
f.Write( &memdata.name, sizeof( memdata.name ) );
|
2018-04-02 00:05:16 +00:00
|
|
|
}
|
2018-06-19 19:39:52 +00:00
|
|
|
|
2018-06-19 19:52:54 +00:00
|
|
|
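// Callstack payloads: entry 0 is a sentinel, so only entries 1..sz are written as arrays of frame ids.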
sz = m_data.callstackPayload.size() - 1;
|
2018-06-19 19:39:52 +00:00
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2018-06-19 23:54:27 +00:00
|
|
|
for( size_t i=1; i<=sz; i++ )
|
2018-06-19 19:39:52 +00:00
|
|
|
{
|
2018-06-19 19:52:54 +00:00
|
|
|
auto cs = m_data.callstackPayload[i];
|
2020-03-28 17:04:33 +00:00
|
|
|
uint16_t csz = cs->size();
|
2018-06-19 19:39:52 +00:00
|
|
|
f.Write( &csz, sizeof( csz ) );
|
2019-03-03 15:50:18 +00:00
|
|
|
f.Write( cs->data(), sizeof( CallstackFrameId ) * csz );
|
2018-06-19 19:39:52 +00:00
|
|
|
}
|
2018-06-19 23:59:25 +00:00
|
|
|
|
|
|
|
sz = m_data.callstackFrameMap.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& frame : m_data.callstackFrameMap )
|
|
|
|
{
|
2019-03-03 15:50:18 +00:00
|
|
|
f.Write( &frame.first, sizeof( CallstackFrameId ) );
|
2019-01-20 18:11:48 +00:00
|
|
|
f.Write( &frame.second->size, sizeof( frame.second->size ) );
|
2020-02-25 23:55:43 +00:00
|
|
|
f.Write( &frame.second->imageName, sizeof( frame.second->imageName ) );
|
2019-01-20 18:11:48 +00:00
|
|
|
f.Write( frame.second->data, sizeof( CallstackFrame ) * frame.second->size );
|
2018-06-19 23:59:25 +00:00
|
|
|
}
|
2019-06-06 21:08:19 +00:00
|
|
|
|
2019-07-12 16:45:35 +00:00
|
|
|
sz = m_data.appInfo.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2019-12-19 16:30:37 +00:00
|
|
|
if( sz != 0 ) f.Write( m_data.appInfo.data(), sizeof( m_data.appInfo[0] ) * sz );
|
2019-07-12 16:45:35 +00:00
|
|
|
|
2019-06-06 21:08:19 +00:00
|
|
|
{
|
2020-04-14 18:07:30 +00:00
|
|
|
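// Frame images: when a dictionary is requested, train a zstd dictionary (ZDICT fastCover) on up to 2 GB of unpacked frames so they compress better; a stored dictionary size of zero means no dictionary.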
sz = m_data.frameImage.size();
|
2021-05-15 14:47:47 +00:00
|
|
|
if( fiDict )
|
|
|
|
{
|
|
|
|
enum : uint32_t { DictSize = 4*1024*1024 };
|
2021-05-15 16:39:01 +00:00
|
|
|
enum : uint32_t { SamplesLimit = 1U << 31 };
|
2021-05-15 14:47:47 +00:00
|
|
|
uint32_t sNum = 0;
|
|
|
|
uint32_t sSize = 0;
|
|
|
|
for( auto& fi : m_data.frameImage )
|
|
|
|
{
|
|
|
|
const auto fisz = fi->w * fi->h / 2;
|
|
|
|
if( sSize + fisz > SamplesLimit ) break;
|
|
|
|
sSize += fisz;
|
|
|
|
sNum++;
|
|
|
|
}
|
|
|
|
|
|
|
|
uint32_t offset = 0;
|
|
|
|
auto sdata = new char[sSize];
|
|
|
|
auto ssize = new size_t[sNum];  // one length entry per sampled frame image
|
|
|
|
for( uint32_t i=0; i<sNum; i++ )
|
|
|
|
{
|
|
|
|
const auto& fi = m_data.frameImage[i];
|
|
|
|
const auto fisz = fi->w * fi->h / 2;
|
2021-05-15 15:34:00 +00:00
|
|
|
const auto image = m_texcomp.Unpack( *fi );
|
2021-05-15 14:47:47 +00:00
|
|
|
memcpy( sdata+offset, image, fisz );
|
|
|
|
ssize[i] = fisz;
|
|
|
|
offset += fisz;
|
|
|
|
}
|
|
|
|
assert( offset == sSize );
|
|
|
|
|
|
|
|
ZDICT_fastCover_params_t params = {};
|
|
|
|
params.d = 6;
|
|
|
|
params.k = 50;
|
|
|
|
params.f = 30;
|
|
|
|
params.nbThreads = std::thread::hardware_concurrency();
|
|
|
|
params.zParams.compressionLevel = 3;
|
|
|
|
|
|
|
|
auto dict = new char[DictSize];
|
2021-11-20 02:30:28 +00:00
|
|
|
const auto dictret = ZDICT_optimizeTrainFromBuffer_fastCover( dict, DictSize, sdata, ssize, sNum, &params );
|
|
|
|
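// zstd/ZDICT report errors as very large size_t values, so any result above DictSize (including errors) falls through to the "no dictionary" path below.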
if( dictret <= DictSize )
|
|
|
|
{
|
|
|
|
const auto finalDictSize = uint32_t( dictret );
|
|
|
|
auto zdict = ZSTD_createCDict( dict, finalDictSize, 3 );
|
|
|
|
|
|
|
|
f.Write( &finalDictSize, sizeof( finalDictSize ) );
|
|
|
|
f.Write( dict, finalDictSize );
|
2021-05-15 14:47:47 +00:00
|
|
|
|
2021-11-20 02:30:28 +00:00
|
|
|
ZSTD_freeCDict( zdict );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
uint32_t zero = 0;
|
|
|
|
f.Write( &zero, sizeof( zero ) );
|
|
|
|
}
|
2021-05-15 14:47:47 +00:00
|
|
|
|
|
|
|
delete[] dict;
|
|
|
|
delete[] ssize;
|
|
|
|
delete[] sdata;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
uint32_t zero = 0;
|
|
|
|
f.Write( &zero, sizeof( zero ) );
|
|
|
|
}
|
2020-04-14 18:07:30 +00:00
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& fi : m_data.frameImage )
|
|
|
|
{
|
|
|
|
f.Write( &fi->w, sizeof( fi->w ) );
|
|
|
|
f.Write( &fi->h, sizeof( fi->h ) );
|
|
|
|
f.Write( &fi->flip, sizeof( fi->flip ) );
|
2021-05-15 15:34:00 +00:00
|
|
|
const auto image = m_texcomp.Unpack( *fi );
|
2020-04-14 18:07:30 +00:00
|
|
|
f.Write( image, fi->w * fi->h / 2 );
|
|
|
|
}
|
2019-06-06 21:08:19 +00:00
|
|
|
}
|
2019-08-12 22:56:57 +00:00
|
|
|
|
|
|
|
// Only save context switches relevant to active threads.
|
2020-01-28 20:49:36 +00:00
|
|
|
std::vector<unordered_flat_map<uint64_t, ContextSwitch*>::const_iterator> ctxValid;
|
2019-08-12 22:56:57 +00:00
|
|
|
ctxValid.reserve( m_data.ctxSwitch.size() );
|
|
|
|
for( auto it = m_data.ctxSwitch.begin(); it != m_data.ctxSwitch.end(); ++it )
|
|
|
|
{
|
2020-03-30 21:41:21 +00:00
|
|
|
auto td = RetrieveThread( it->first );
|
2020-03-31 00:20:34 +00:00
|
|
|
if( td && ( td->count > 0 || !td->samples.empty() ) )
|
2019-08-12 22:56:57 +00:00
|
|
|
{
|
|
|
|
ctxValid.emplace_back( it );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
sz = ctxValid.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& ctx : ctxValid )
|
|
|
|
{
|
|
|
|
f.Write( &ctx->first, sizeof( ctx->first ) );
|
|
|
|
sz = ctx->second->v.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
int64_t refTime = 0;
|
|
|
|
for( auto& cs : ctx->second->v )
|
|
|
|
{
|
2019-10-30 21:25:46 +00:00
|
|
|
WriteTimeOffset( f, refTime, cs.WakeupVal() );
|
2019-08-15 21:53:47 +00:00
|
|
|
WriteTimeOffset( f, refTime, cs.Start() );
|
|
|
|
WriteTimeOffset( f, refTime, cs.End() );
|
|
|
|
uint8_t cpu = cs.Cpu();
|
|
|
|
int8_t reason = cs.Reason();
|
|
|
|
int8_t state = cs.State();
|
2021-11-06 19:22:38 +00:00
|
|
|
uint64_t thread = DecompressThread( cs.Thread() );
|
2019-08-15 21:53:47 +00:00
|
|
|
f.Write( &cpu, sizeof( cpu ) );
|
|
|
|
f.Write( &reason, sizeof( reason ) );
|
|
|
|
f.Write( &state, sizeof( state ) );
|
2021-11-06 19:22:38 +00:00
|
|
|
f.Write( &thread, sizeof( thread ) );
|
2019-08-12 22:56:57 +00:00
|
|
|
}
|
|
|
|
}
|
2019-08-16 14:51:02 +00:00
|
|
|
|
|
|
|
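// Per-CPU context switch timelines; cpuData is a fixed array of 256 logical processors.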
sz = GetContextSwitchPerCpuCount();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( int i=0; i<256; i++ )
|
|
|
|
{
|
|
|
|
sz = m_data.cpuData[i].cs.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
int64_t refTime = 0;
|
|
|
|
for( auto& cx : m_data.cpuData[i].cs )
|
|
|
|
{
|
|
|
|
WriteTimeOffset( f, refTime, cx.Start() );
|
|
|
|
WriteTimeOffset( f, refTime, cx.End() );
|
2019-08-19 21:09:58 +00:00
|
|
|
uint16_t thread = cx.Thread();
|
2019-08-16 14:51:02 +00:00
|
|
|
f.Write( &thread, sizeof( thread ) );
|
|
|
|
}
|
|
|
|
}
|
2019-08-17 20:36:21 +00:00
|
|
|
|
|
|
|
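// Thread-to-process id mapping, followed by per-CPU-thread timing statistics.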
sz = m_data.tidToPid.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.tidToPid )
|
|
|
|
{
|
|
|
|
f.Write( &v.first, sizeof( v.first ) );
|
|
|
|
f.Write( &v.second, sizeof( v.second ) );
|
|
|
|
}
|
2019-08-17 23:53:38 +00:00
|
|
|
|
|
|
|
sz = m_data.cpuThreadData.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.cpuThreadData )
|
|
|
|
{
|
|
|
|
f.Write( &v.first, sizeof( v.first ) );
|
|
|
|
f.Write( &v.second, sizeof( v.second ) );
|
|
|
|
}
|
2020-02-26 21:53:18 +00:00
|
|
|
|
2020-04-08 10:49:58 +00:00
|
|
|
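// Symbol data: only the counts of the symbol location tables are stored, followed by the symbol map and the captured machine code for each symbol.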
sz = m_data.symbolLoc.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
sz = m_data.symbolLocInline.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2020-02-26 21:53:18 +00:00
|
|
|
sz = m_data.symbolMap.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.symbolMap )
|
|
|
|
{
|
|
|
|
f.Write( &v.first, sizeof( v.first ) );
|
|
|
|
f.Write( &v.second, sizeof( v.second ) );
|
|
|
|
}
|
2020-03-25 19:52:59 +00:00
|
|
|
|
|
|
|
sz = m_data.symbolCode.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.symbolCode )
|
|
|
|
{
|
|
|
|
f.Write( &v.first, sizeof( v.first ) );
|
|
|
|
f.Write( &v.second.len, sizeof( v.second.len ) );
|
2020-03-25 21:50:13 +00:00
|
|
|
f.Write( v.second.data, v.second.len );
|
2020-03-25 19:52:59 +00:00
|
|
|
}
|
2020-04-02 10:12:10 +00:00
|
|
|
|
|
|
|
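// Code address lists per source location, stored as deltas from the previous address to keep the values small.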
sz = m_data.locationCodeAddressList.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.locationCodeAddressList )
|
|
|
|
{
|
|
|
|
f.Write( &v.first, sizeof( v.first ) );
|
|
|
|
uint16_t lsz = uint16_t( v.second.size() );
|
|
|
|
f.Write( &lsz, sizeof( lsz ) );
|
|
|
|
uint64_t ref = 0;
|
|
|
|
const uint64_t* ptr = v.second.data();
|
|
|
|
for( uint16_t i=0; i<lsz; i++ )
|
|
|
|
{
|
|
|
|
uint64_t diff = *ptr++ - ref;
|
|
|
|
ref += diff;
|
|
|
|
f.Write( &diff, sizeof( diff ) );
|
|
|
|
}
|
|
|
|
}
|
2020-05-23 13:43:42 +00:00
|
|
|
|
2021-06-19 17:58:16 +00:00
|
|
|
sz = m_data.codeSymbolMap.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.codeSymbolMap )
|
|
|
|
{
|
|
|
|
f.Write( &v.first, sizeof( v.first ) );
|
|
|
|
f.Write( &v.second, sizeof( v.second ) );
|
|
|
|
}
|
|
|
|
|
2021-05-20 17:37:51 +00:00
|
|
|
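// Hardware sample counters (cycles, retired instructions, cache and branch events) per code address.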
sz = m_data.hwSamples.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.hwSamples )
|
|
|
|
{
|
|
|
|
f.Write( &v.first, sizeof( v.first ) );
|
2021-06-04 11:38:45 +00:00
|
|
|
WriteHwSampleVec( f, v.second.cycles );
|
|
|
|
WriteHwSampleVec( f, v.second.retired );
|
|
|
|
WriteHwSampleVec( f, v.second.cacheRef );
|
|
|
|
WriteHwSampleVec( f, v.second.cacheMiss );
|
|
|
|
WriteHwSampleVec( f, v.second.branchRetired );
|
|
|
|
WriteHwSampleVec( f, v.second.branchMiss );
|
2021-05-20 17:37:51 +00:00
|
|
|
}
|
|
|
|
|
2020-05-23 13:43:42 +00:00
|
|
|
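// Cached source files: file name length and bytes, then the file contents.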
sz = m_data.sourceFileCache.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
for( auto& v : m_data.sourceFileCache )
|
|
|
|
{
|
|
|
|
const uint32_t s32 = uint32_t( strlen( v.first ) );
|
|
|
|
f.Write( &s32, sizeof( s32 ) );
|
|
|
|
f.Write( v.first, s32 );
|
|
|
|
f.Write( &v.second.len, sizeof( v.second.len ) );
|
|
|
|
f.Write( v.second.data, v.second.len );
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
|
2019-11-02 15:17:20 +00:00
|
|
|
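// Timelines are written recursively. "Magic" vectors keep their events in-place, so a direct adapter is used; otherwise events are accessed through pointers.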
void Worker::WriteTimeline( FileWrite& f, const Vector<short_ptr<ZoneEvent>>& vec, int64_t& refTime )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2020-01-26 15:18:16 +00:00
|
|
|
uint32_t sz = uint32_t( vec.size() );
|
2018-02-13 13:57:47 +00:00
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2019-11-10 15:34:38 +00:00
|
|
|
if( vec.is_magic() )
|
|
|
|
{
|
|
|
|
WriteTimelineImpl<VectorAdapterDirect<ZoneEvent>>( f, *(Vector<ZoneEvent>*)( &vec ), refTime );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
WriteTimelineImpl<VectorAdapterPointer<ZoneEvent>>( f, vec, refTime );
|
|
|
|
}
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2019-11-10 15:34:38 +00:00
|
|
|
template<typename Adapter, typename V>
|
|
|
|
void Worker::WriteTimelineImpl( FileWrite& f, const V& vec, int64_t& refTime )
|
|
|
|
{
|
|
|
|
Adapter a;
|
|
|
|
for( auto& val : vec )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2019-11-10 15:34:38 +00:00
|
|
|
auto& v = a(val);
|
|
|
|
int16_t srcloc = v.SrcLoc();
|
2019-08-15 18:12:09 +00:00
|
|
|
f.Write( &srcloc, sizeof( srcloc ) );
|
2019-11-10 15:34:38 +00:00
|
|
|
int64_t start = v.Start();
|
2019-08-15 18:12:09 +00:00
|
|
|
WriteTimeOffset( f, refTime, start );
|
2020-01-26 14:57:55 +00:00
|
|
|
f.Write( &v.extra, sizeof( v.extra ) );
|
2020-02-20 23:36:45 +00:00
|
|
|
if( !v.HasChildren() )
|
2018-07-22 14:05:50 +00:00
|
|
|
{
|
2020-01-26 15:18:16 +00:00
|
|
|
const uint32_t sz = 0;
|
2018-07-22 14:05:50 +00:00
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2019-11-10 15:34:38 +00:00
|
|
|
WriteTimeline( f, GetZoneChildren( v.Child() ), refTime );
|
2018-07-22 14:05:50 +00:00
|
|
|
}
|
2019-11-10 15:34:38 +00:00
|
|
|
WriteTimeOffset( f, refTime, v.End() );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-02 14:52:34 +00:00
|
|
|
void Worker::WriteTimeline( FileWrite& f, const Vector<short_ptr<GpuEvent>>& vec, int64_t& refTime, int64_t& refGpuTime )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
|
|
|
uint64_t sz = vec.size();
|
|
|
|
f.Write( &sz, sizeof( sz ) );
|
2019-11-10 15:34:38 +00:00
|
|
|
if( vec.is_magic() )
|
|
|
|
{
|
|
|
|
WriteTimelineImpl<VectorAdapterDirect<GpuEvent>>( f, *(Vector<GpuEvent>*)( &vec ), refTime, refGpuTime );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
WriteTimelineImpl<VectorAdapterPointer<GpuEvent>>( f, vec, refTime, refGpuTime );
|
|
|
|
}
|
|
|
|
}
|
2018-02-13 13:57:47 +00:00
|
|
|
|
2019-11-10 15:34:38 +00:00
|
|
|
template<typename Adapter, typename V>
|
|
|
|
void Worker::WriteTimelineImpl( FileWrite& f, const V& vec, int64_t& refTime, int64_t& refGpuTime )
|
|
|
|
{
|
|
|
|
Adapter a;
|
|
|
|
for( auto& val : vec )
|
2018-02-13 13:57:47 +00:00
|
|
|
{
|
2019-11-10 15:34:38 +00:00
|
|
|
auto& v = a(val);
|
|
|
|
WriteTimeOffset( f, refTime, v.CpuStart() );
|
|
|
|
WriteTimeOffset( f, refGpuTime, v.GpuStart() );
|
|
|
|
const int16_t srcloc = v.SrcLoc();
|
2019-10-13 12:36:59 +00:00
|
|
|
f.Write( &srcloc, sizeof( srcloc ) );
|
2019-11-10 15:34:38 +00:00
|
|
|
f.Write( &v.callstack, sizeof( v.callstack ) );
|
|
|
|
const uint16_t thread = v.Thread();
|
2019-10-13 12:36:59 +00:00
|
|
|
f.Write( &thread, sizeof( thread ) );
|
2018-12-30 22:06:03 +00:00
|
|
|
|
2019-11-10 15:34:38 +00:00
|
|
|
if( v.Child() < 0 )
|
2018-07-22 17:47:01 +00:00
|
|
|
{
|
2019-11-10 15:34:38 +00:00
|
|
|
const uint64_t sz = 0;
|
2018-07-22 17:47:01 +00:00
|
|
|
f.Write( &sz, sizeof( sz ) );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2019-11-10 15:34:38 +00:00
|
|
|
WriteTimeline( f, GetGpuChildren( v.Child() ), refTime, refGpuTime );
|
2018-07-22 17:47:01 +00:00
|
|
|
}
|
2018-12-30 22:06:03 +00:00
|
|
|
|
2019-11-10 15:34:38 +00:00
|
|
|
WriteTimeOffset( f, refTime, v.CpuEnd() );
|
|
|
|
WriteTimeOffset( f, refGpuTime, v.GpuEnd() );
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-01-15 17:42:15 +00:00
|
|
|
static const char* s_failureReasons[] = {
|
|
|
|
"<unknown reason>",
|
2019-01-15 17:56:26 +00:00
|
|
|
"Invalid order of zone begin and end events.",
|
2020-04-30 17:05:13 +00:00
|
|
|
"Zone is ended twice.",
|
2019-01-16 00:17:01 +00:00
|
|
|
"Zone text transfer destination doesn't match active zone.",
|
2021-10-10 12:05:21 +00:00
|
|
|
"Zone value transfer destination doesn't match active zone.",
|
2020-11-27 11:37:35 +00:00
|
|
|
"Zone color transfer destination doesn't match active zone.",
|
2019-01-16 00:17:01 +00:00
|
|
|
"Zone name transfer destination doesn't match active zone.",
|
2019-02-28 18:32:42 +00:00
|
|
|
"Memory free event without a matching allocation.",
|
2021-03-09 21:10:29 +00:00
|
|
|
"Memory allocation event was reported for an address that is already tracked and not freed.",
|
2019-02-28 18:32:42 +00:00
|
|
|
"Discontinuous frame begin/end mismatch.",
|
2019-06-09 11:44:53 +00:00
|
|
|
"Frame image offset is invalid.",
|
|
|
|
"Multiple frame images were sent for a single frame.",
|
2021-11-02 00:47:31 +00:00
|
|
|
"Fiber execution stopped on a thread which is not executing a fiber.",
|
2019-01-15 17:42:15 +00:00
|
|
|
};
|
|
|
|
|
2019-01-15 17:56:17 +00:00
|
|
|
static_assert( sizeof( s_failureReasons ) / sizeof( *s_failureReasons ) == (int)Worker::Failure::NUM_FAILURES, "Missing failure reason description." );
|
|
|
|
|
2019-01-15 17:42:15 +00:00
|
|
|
const char* Worker::GetFailureString( Worker::Failure failure )
|
|
|
|
{
|
|
|
|
return s_failureReasons[(int)failure];
|
|
|
|
}
|
|
|
|
|
2019-11-25 22:59:48 +00:00
|
|
|
void Worker::SetParameter( size_t paramIdx, int32_t val )
|
|
|
|
{
|
|
|
|
assert( paramIdx < m_params.size() );
|
|
|
|
m_params[paramIdx].val = val;
|
|
|
|
const auto idx = uint64_t( m_params[paramIdx].idx );
|
|
|
|
const auto v = uint64_t( uint32_t( val ) );
|
2020-01-28 20:57:44 +00:00
|
|
|
Query( ServerQueryParameter, ( idx << 32 ) | v );
|
2019-11-25 22:59:48 +00:00
|
|
|
}
|
|
|
|
|
2019-11-29 21:46:57 +00:00
|
|
|
const Worker::CpuThreadTopology* Worker::GetThreadTopology( uint32_t cpuThread ) const
|
|
|
|
{
|
|
|
|
auto it = m_data.cpuTopologyMap.find( cpuThread );
|
|
|
|
if( it == m_data.cpuTopologyMap.end() ) return nullptr;
|
|
|
|
return &it->second;
|
|
|
|
}
|
|
|
|
|
2020-02-20 22:39:40 +00:00
|
|
|
ZoneExtra& Worker::AllocZoneExtra( ZoneEvent& ev )
|
2020-01-26 14:57:55 +00:00
|
|
|
{
|
|
|
|
assert( ev.extra == 0 );
|
|
|
|
ev.extra = uint32_t( m_data.zoneExtra.size() );
|
|
|
|
auto& extra = m_data.zoneExtra.push_next();
|
2020-08-15 00:14:29 +00:00
|
|
|
memset( (char*)&extra, 0, sizeof( extra ) );
|
2020-02-20 22:39:40 +00:00
|
|
|
return extra;
|
2020-01-26 14:57:55 +00:00
|
|
|
}
|
|
|
|
|
2020-02-20 22:37:55 +00:00
|
|
|
ZoneExtra& Worker::RequestZoneExtra( ZoneEvent& ev )
|
|
|
|
{
|
2020-02-20 22:39:40 +00:00
|
|
|
if( !HasZoneExtra( ev ) )
|
|
|
|
{
|
|
|
|
return AllocZoneExtra( ev );
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
return GetZoneExtraMutable( ev );
|
|
|
|
}
|
2020-02-20 22:37:55 +00:00
|
|
|
}
|
|
|
|
|
2022-05-01 12:25:07 +00:00
|
|
|
void Worker::CacheSource( const StringRef& str, const StringIdx& image )
|
2020-05-23 12:09:47 +00:00
|
|
|
{
|
|
|
|
assert( str.active );
|
|
|
|
assert( m_checkedFileStrings.find( str ) == m_checkedFileStrings.end() );
|
|
|
|
m_checkedFileStrings.emplace( str );
|
|
|
|
auto file = GetString( str );
|
2021-01-26 00:47:00 +00:00
|
|
|
// The same file may be referenced through both pointer and index strings, so it may already be in the cache.
|
2021-01-31 16:24:41 +00:00
|
|
|
if( m_data.sourceFileCache.find( file ) != m_data.sourceFileCache.end() ) return;
|
2021-02-06 21:59:07 +00:00
|
|
|
const auto execTime = GetExecutableTime();
|
|
|
|
if( SourceFileValid( file, execTime != 0 ? execTime : GetCaptureTime() ) )
|
2021-01-31 16:24:41 +00:00
|
|
|
{
|
2022-03-30 14:06:35 +00:00
|
|
|
CacheSourceFromFile( file );
|
2020-05-23 12:09:47 +00:00
|
|
|
}
|
2021-02-06 21:59:07 +00:00
|
|
|
else if( execTime != 0 )
|
2021-02-03 23:03:25 +00:00
|
|
|
{
|
|
|
|
m_sourceCodeQuery.emplace_back( file );
|
2022-05-01 12:25:07 +00:00
|
|
|
QuerySourceFile( file, image.Active() ? GetString( image ) : nullptr );
|
2021-02-03 23:03:25 +00:00
|
|
|
}
|
2020-05-23 12:09:47 +00:00
|
|
|
}
|
|
|
|
|
2022-03-30 14:06:35 +00:00
|
|
|
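// Assumes the caller has already checked that the file exists and is readable (see SourceFileValid), so the fopen result is not validated here.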
void Worker::CacheSourceFromFile( const char* fn )
|
|
|
|
{
|
|
|
|
FILE* f = fopen( fn, "rb" );
|
|
|
|
fseek( f, 0, SEEK_END );
|
|
|
|
const auto sz = ftell( f );
|
|
|
|
fseek( f, 0, SEEK_SET );
|
|
|
|
auto src = (char*)m_slab.AllocBig( sz );
|
|
|
|
fread( src, 1, sz, f );
|
|
|
|
fclose( f );
|
|
|
|
m_data.sourceFileCache.emplace( fn, MemoryBlock{ src, uint32_t( sz ) } );
|
|
|
|
}
|
|
|
|
|
2020-05-23 12:22:50 +00:00
|
|
|
uint64_t Worker::GetSourceFileCacheSize() const
|
|
|
|
{
|
|
|
|
uint64_t cnt = 0;
|
|
|
|
for( auto& v : m_data.sourceFileCache )
|
|
|
|
{
|
|
|
|
cnt += v.second.len;
|
|
|
|
}
|
|
|
|
return cnt;
|
|
|
|
}
|
|
|
|
|
2020-05-23 13:08:26 +00:00
|
|
|
Worker::MemoryBlock Worker::GetSourceFileFromCache( const char* file ) const
|
|
|
|
{
|
|
|
|
auto it = m_data.sourceFileCache.find( file );
|
|
|
|
if( it == m_data.sourceFileCache.end() ) return MemoryBlock {};
|
|
|
|
return it->second;
|
|
|
|
}
|
|
|
|
|
2021-06-04 11:58:00 +00:00
|
|
|
HwSampleData* Worker::GetHwSampleData( uint64_t addr )
|
2021-05-19 21:05:50 +00:00
|
|
|
{
|
|
|
|
auto it = m_data.hwSamples.find( addr );
|
|
|
|
if( it == m_data.hwSamples.end() ) return nullptr;
|
|
|
|
return &it->second;
|
|
|
|
}
|
|
|
|
|
2021-05-19 21:29:27 +00:00
|
|
|
uint64_t Worker::GetHwSampleCount() const
|
|
|
|
{
|
|
|
|
uint64_t cnt = 0;
|
|
|
|
for( auto& v : m_data.hwSamples )
|
|
|
|
{
|
2021-06-04 11:38:45 +00:00
|
|
|
cnt += v.second.cycles.size();
|
|
|
|
cnt += v.second.retired.size();
|
|
|
|
cnt += v.second.cacheRef.size();
|
|
|
|
cnt += v.second.cacheMiss.size();
|
|
|
|
cnt += v.second.branchRetired.size();
|
|
|
|
cnt += v.second.branchMiss.size();
|
2021-05-19 21:29:27 +00:00
|
|
|
}
|
|
|
|
return cnt;
|
|
|
|
}
|
|
|
|
|
2022-03-30 14:07:15 +00:00
|
|
|
void Worker::CacheSourceFiles()
|
|
|
|
{
|
|
|
|
const auto execTime = GetExecutableTime();
|
|
|
|
|
|
|
|
for( auto& sl : m_data.sourceLocationPayload )
|
|
|
|
{
|
|
|
|
const char* file = GetString( sl->file );
|
|
|
|
if( m_data.sourceFileCache.find( file ) == m_data.sourceFileCache.end() )
|
|
|
|
{
|
|
|
|
if( SourceFileValid( file, execTime != 0 ? execTime : GetCaptureTime() ) ) CacheSourceFromFile( file );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for( auto& sl : m_data.sourceLocation )
|
|
|
|
{
|
|
|
|
const char* file = GetString( sl.second.file );
|
|
|
|
if( m_data.sourceFileCache.find( file ) == m_data.sourceFileCache.end() )
|
|
|
|
{
|
|
|
|
if( SourceFileValid( file, execTime != 0 ? execTime : GetCaptureTime() ) ) CacheSourceFromFile( file );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for( auto& sym : m_data.symbolMap )
|
|
|
|
{
|
|
|
|
const char* file = GetString( sym.second.file );
|
|
|
|
if( m_data.sourceFileCache.find( file ) == m_data.sourceFileCache.end() )
|
|
|
|
{
|
|
|
|
if( SourceFileValid( file, execTime != 0 ? execTime : GetCaptureTime() ) ) CacheSourceFromFile( file );
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-02-13 13:57:47 +00:00
|
|
|
}
|