mirror of
https://github.com/wolfpld/tracy.git
synced 2024-11-25 15:34:36 +00:00
Merge pull request #401 from thedmd/inactive-allocations
Add ability to filter callstacks in memory tab by inactive allocations.
This commit is contained in:
commit
2ae4a394a9
@ -3286,7 +3286,7 @@ Each tree node consists of the function name, the source file location, and the
|
||||
|
||||
The \emph{Group by function name} option controls how tree nodes are grouped. If it is disabled, the grouping is performed at a machine instruction-level granularity. This may result in a very verbose output, but the displayed source locations are precise. To make the tree more readable, you may opt to perform grouping at the function name level, which will result in less valid source file locations, as multiple entries are collapsed into one.
|
||||
|
||||
Enabling the \emph{Only active allocations} option will limit the call stack tree only to display active allocations.
|
||||
Enabling the \emph{Only active allocations} option will limit the call stack tree only to display active allocations. Enabling the \emph{Only inactive allocations} option will have a similar effect for inactive allocations. Both are mutually exclusive; enabling one disables the other. Displaying inactive allocations, when combined with \emph{Limit range}, will show short-lived allocations, highlighting potentially unwanted behavior in the code.
|
||||
|
||||
Clicking the \RMB{}~right mouse button on the function name will open the allocations list window (see section \ref{alloclist}), which lists all the allocations included at the current call stack tree level. Likewise, clicking the \RMB{}~right mouse button on the source file location will open the source file view window (if applicable, see section~\ref{sourceview}).
|
||||
|
||||
|
@ -17227,7 +17227,7 @@ static tracy_force_inline T* GetParentFrameTreeItemGroup( unordered_flat_map<uin
|
||||
}
|
||||
|
||||
|
||||
unordered_flat_map<uint32_t, View::MemPathData> View::GetCallstackPaths( const MemData& mem, bool onlyActive ) const
|
||||
unordered_flat_map<uint32_t, View::MemPathData> View::GetCallstackPaths( const MemData& mem, MemRange memRange ) const
|
||||
{
|
||||
unordered_flat_map<uint32_t, MemPathData> pathSum;
|
||||
pathSum.reserve( m_worker.GetCallstackPayloadCount() );
|
||||
@ -17238,13 +17238,13 @@ unordered_flat_map<uint32_t, View::MemPathData> View::GetCallstackPaths( const M
|
||||
if( it != mem.data.end() )
|
||||
{
|
||||
auto end = std::lower_bound( mem.data.begin(), mem.data.end(), m_memInfo.range.max, []( const auto& lhs, const auto& rhs ) { return lhs.TimeAlloc() < rhs; } );
|
||||
if( onlyActive )
|
||||
if( memRange != MemRange::Full )
|
||||
{
|
||||
while( it != end )
|
||||
{
|
||||
auto& ev = *it++;
|
||||
if( ev.CsAlloc() == 0 ) continue;
|
||||
if( ev.TimeFree() >= 0 && ev.TimeFree() < m_memInfo.range.max ) continue;
|
||||
if( ( memRange == MemRange::Inactive ) == ( ev.TimeFree() >= 0 && ev.TimeFree() < m_memInfo.range.max ) ) continue;
|
||||
auto pit = pathSum.find( ev.CsAlloc() );
|
||||
if( pit == pathSum.end() )
|
||||
{
|
||||
@ -17279,12 +17279,12 @@ unordered_flat_map<uint32_t, View::MemPathData> View::GetCallstackPaths( const M
|
||||
}
|
||||
else
|
||||
{
|
||||
if( onlyActive )
|
||||
if( memRange != MemRange::Full )
|
||||
{
|
||||
for( auto& ev : mem.data )
|
||||
{
|
||||
if( ev.CsAlloc() == 0 ) continue;
|
||||
if( ev.TimeFree() >= 0 ) continue;
|
||||
if( ( memRange == MemRange::Inactive ) == ( ev.TimeFree() >= 0 ) ) continue;
|
||||
auto it = pathSum.find( ev.CsAlloc() );
|
||||
if( it == pathSum.end() )
|
||||
{
|
||||
@ -17321,7 +17321,7 @@ unordered_flat_map<uint32_t, View::MemPathData> View::GetCallstackPaths( const M
|
||||
unordered_flat_map<uint64_t, MemCallstackFrameTree> View::GetCallstackFrameTreeBottomUp( const MemData& mem ) const
|
||||
{
|
||||
unordered_flat_map<uint64_t, MemCallstackFrameTree> root;
|
||||
auto pathSum = GetCallstackPaths( mem, m_activeOnlyBottomUp );
|
||||
auto pathSum = GetCallstackPaths( mem, m_memRangeBottomUp );
|
||||
if( m_groupCallstackTreeByNameBottomUp )
|
||||
{
|
||||
for( auto& path : pathSum )
|
||||
@ -17451,7 +17451,7 @@ unordered_flat_map<uint64_t, CallstackFrameTree> View::GetParentsCallstackFrameT
|
||||
unordered_flat_map<uint64_t, MemCallstackFrameTree> View::GetCallstackFrameTreeTopDown( const MemData& mem ) const
|
||||
{
|
||||
unordered_flat_map<uint64_t, MemCallstackFrameTree> root;
|
||||
auto pathSum = GetCallstackPaths( mem, m_activeOnlyTopDown );
|
||||
auto pathSum = GetCallstackPaths( mem, m_memRangeTopDown );
|
||||
if( m_groupCallstackTreeByNameTopDown )
|
||||
{
|
||||
for( auto& path : pathSum )
|
||||
@ -18011,7 +18011,15 @@ void View::DrawMemory()
|
||||
ImGui::SameLine();
|
||||
ImGui::Spacing();
|
||||
ImGui::SameLine();
|
||||
SmallCheckbox( "Only active allocations", &m_activeOnlyBottomUp );
|
||||
bool activeOnlyBottomUp = m_memRangeBottomUp == MemRange::Active;
|
||||
if( SmallCheckbox( "Only active allocations", &activeOnlyBottomUp ) )
|
||||
m_memRangeBottomUp = activeOnlyBottomUp ? MemRange::Active : MemRange::Full;
|
||||
ImGui::SameLine();
|
||||
ImGui::Spacing();
|
||||
ImGui::SameLine();
|
||||
bool inactiveOnlyBottomUp = m_memRangeBottomUp == MemRange::Inactive;
|
||||
if( SmallCheckbox( "Only inactive allocations", &inactiveOnlyBottomUp ) )
|
||||
m_memRangeBottomUp = inactiveOnlyBottomUp ? MemRange::Inactive : MemRange::Full;
|
||||
|
||||
auto tree = GetCallstackFrameTreeBottomUp( mem );
|
||||
if( !tree.empty() )
|
||||
@ -18041,7 +18049,15 @@ void View::DrawMemory()
|
||||
ImGui::SameLine();
|
||||
ImGui::Spacing();
|
||||
ImGui::SameLine();
|
||||
SmallCheckbox( "Only active allocations", &m_activeOnlyTopDown );
|
||||
bool activeOnlyTopDown = m_memRangeTopDown == MemRange::Active;
|
||||
if( SmallCheckbox( "Only active allocations", &activeOnlyTopDown ) )
|
||||
m_memRangeTopDown = activeOnlyTopDown ? MemRange::Active : MemRange::Full;
|
||||
ImGui::SameLine();
|
||||
ImGui::Spacing();
|
||||
ImGui::SameLine();
|
||||
bool inactiveOnlyTopDown = m_memRangeTopDown == MemRange::Inactive;
|
||||
if( SmallCheckbox( "Only inactive allocations", &inactiveOnlyTopDown ) )
|
||||
m_memRangeTopDown = inactiveOnlyTopDown ? MemRange::Inactive : MemRange::Full;
|
||||
|
||||
auto tree = GetCallstackFrameTreeTopDown( mem );
|
||||
if( !tree.empty() )
|
||||
|
@ -147,6 +147,13 @@ private:
|
||||
LastRange
|
||||
};
|
||||
|
||||
enum class MemRange
|
||||
{
|
||||
Full,
|
||||
Active,
|
||||
Inactive
|
||||
};
|
||||
|
||||
struct ZoneColorData
|
||||
{
|
||||
uint32_t color;
|
||||
@ -227,7 +234,7 @@ private:
|
||||
|
||||
void ListMemData( std::vector<const MemEvent*>& vec, std::function<void(const MemEvent*)> DrawAddress, const char* id = nullptr, int64_t startTime = -1, uint64_t pool = 0 );
|
||||
|
||||
unordered_flat_map<uint32_t, MemPathData> GetCallstackPaths( const MemData& mem, bool onlyActive ) const;
|
||||
unordered_flat_map<uint32_t, MemPathData> GetCallstackPaths( const MemData& mem, MemRange memRange ) const;
|
||||
unordered_flat_map<uint64_t, MemCallstackFrameTree> GetCallstackFrameTreeBottomUp( const MemData& mem ) const;
|
||||
unordered_flat_map<uint64_t, MemCallstackFrameTree> GetCallstackFrameTreeTopDown( const MemData& mem ) const;
|
||||
void DrawFrameTreeLevel( const unordered_flat_map<uint64_t, MemCallstackFrameTree>& tree, int& idx );
|
||||
@ -494,8 +501,8 @@ private:
|
||||
|
||||
bool m_groupCallstackTreeByNameBottomUp = true;
|
||||
bool m_groupCallstackTreeByNameTopDown = true;
|
||||
bool m_activeOnlyBottomUp = false;
|
||||
bool m_activeOnlyTopDown = false;
|
||||
MemRange m_memRangeBottomUp = MemRange::Full;
|
||||
MemRange m_memRangeTopDown = MemRange::Full;
|
||||
|
||||
enum class SaveThreadState
|
||||
{
|
||||
|
Loading…
Reference in New Issue
Block a user