@@ -25,12 +25,9 @@ struct StackDepotNode {
   using hash_type = u64;
   hash_type stack_hash;
   u32 link;
-  atomic_uint32_t tag_and_use_count;  // tag : 12 high bits; use_count : 20;
+  u32 tag;

   static const u32 kTabSizeLog = SANITIZER_ANDROID ? 16 : 20;
-  static const u32 kUseCountBits = 20;
-  static const u32 kMaxUseCount = 1 << kUseCountBits;
-  static const u32 kUseCountMask = (1 << kUseCountBits) - 1;

   typedef StackTrace args_type;
   bool eq(hash_type hash, const args_type &args) const {
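The field removed above packed two logical values into a single atomic word: a 12-bit tag in the high bits and a 20-bit use count in the low bits. A minimal sketch of that packing scheme, using std::atomic in place of the sanitizer's atomic_uint32_t (illustrative only, not the depot's own code):

#include <atomic>
#include <cstdint>

// Old layout: tag in the 12 high bits, use_count in the 20 low bits.
constexpr uint32_t kUseCountBits = 20;
constexpr uint32_t kUseCountMask = (1u << kUseCountBits) - 1;

std::atomic<uint32_t> tag_and_use_count{0};

// Storing the tag overwrites the whole word, so it is only safe while the
// counter is still zero (store() runs once, before any increments).
void set_tag(uint32_t tag) {
  tag_and_use_count.store(tag << kUseCountBits, std::memory_order_relaxed);
}

uint32_t tag() {
  return tag_and_use_count.load(std::memory_order_relaxed) >> kUseCountBits;
}

uint32_t use_count() {
  return tag_and_use_count.load(std::memory_order_relaxed) & kUseCountMask;
}

Splitting the field apart makes the tag a plain u32 that needs no atomic access at all, since it never changes after store().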
@@ -53,19 +50,6 @@ struct StackDepotNode {
   typedef StackDepotHandle handle_type;
 };

-COMPILER_CHECK(StackDepotNode::kMaxUseCount >= (u32)kStackDepotMaxUseCount);
-
-int StackDepotHandle::use_count() const {
-  return atomic_load(&node_->tag_and_use_count, memory_order_relaxed) &
-         StackDepotNode::kUseCountMask;
-}
-void StackDepotHandle::inc_use_count_unsafe() {
-  u32 prev =
-      atomic_fetch_add(&node_->tag_and_use_count, 1, memory_order_relaxed) &
-      StackDepotNode::kUseCountMask;
-  CHECK_LT(prev + 1, StackDepotNode::kMaxUseCount);
-}
-
 // FIXME(dvyukov): this single reserved bit is used in TSan.
 typedef StackDepotBase<StackDepotNode, 1, StackDepotNode::kTabSizeLog>
     StackDepot;
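The deleted inc_use_count_unsafe carried an overflow guard: with only 20 bits available, an unchecked increment would eventually spill into the tag bits. A sketch of that guard pattern under the same assumptions, with assert standing in for CHECK_LT:

#include <atomic>
#include <cassert>
#include <cstdint>

constexpr uint32_t kUseCountBits = 20;
constexpr uint32_t kUseCountMask = (1u << kUseCountBits) - 1;
constexpr uint32_t kMaxUseCount = 1u << kUseCountBits;

std::atomic<uint32_t> tag_and_use_count{0};

void inc_use_count_unsafe() {
  // fetch_add returns the value before the increment; mask off the tag
  // bits and verify the counter stayed below its 20-bit ceiling.
  uint32_t prev = tag_and_use_count.fetch_add(1, std::memory_order_relaxed) &
                  kUseCountMask;
  assert(prev + 1 < kMaxUseCount && "use count would spill into tag bits");
}

Once the counter moves into its own full 32-bit word (below), the 20-bit ceiling no longer applies, so the commit drops the check along with the kUseCount* constants and the COMPILER_CHECK against kStackDepotMaxUseCount.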
@@ -74,15 +58,27 @@ static StackDepot theDepot;
 // caching efficiency.
 static TwoLevelMap<uptr *, StackDepot::kNodesSize1, StackDepot::kNodesSize2>
     tracePtrs;
+// Keep mutable data out of frequently accessed nodes to improve caching
+// efficiency.
+static TwoLevelMap<atomic_uint32_t, StackDepot::kNodesSize1,
+                   StackDepot::kNodesSize2>
+    useCounts;
+
+int StackDepotHandle::use_count() const {
+  return atomic_load_relaxed(&useCounts[id_]);
+}
+
+void StackDepotHandle::inc_use_count_unsafe() {
+  atomic_fetch_add(&useCounts[id_], 1, memory_order_relaxed);
+}

 uptr StackDepotNode::allocated() {
-  return traceAllocator.allocated() + tracePtrs.MemoryUsage();
+  return traceAllocator.allocated() + tracePtrs.MemoryUsage() +
+         useCounts.MemoryUsage();
 }

 void StackDepotNode::store(u32 id, const args_type &args, hash_type hash) {
-  CHECK_EQ(args.tag & (~kUseCountMask >> kUseCountBits), args.tag);
-  atomic_store(&tag_and_use_count, args.tag << kUseCountBits,
-               memory_order_relaxed);
+  tag = args.tag;
   stack_hash = hash;
   uptr *stack_trace = traceAllocator.alloc(args.size + 1);
   *stack_trace = args.size;
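The new useCounts table is a hot/cold split: a StackDepotNode becomes effectively immutable after store(), while the one frequently mutated field moves into a parallel structure addressed by the same id. A minimal sketch of the idea, with flat arrays standing in for TwoLevelMap (sizes and names hypothetical):

#include <atomic>
#include <cstddef>
#include <cstdint>

constexpr size_t kMaxNodes = 1 << 20;

// Read-mostly data: after construction, lookups never see these cache
// lines invalidated by counter updates.
struct Node {
  uint64_t stack_hash;
  uint32_t link;
  uint32_t tag;
};
Node nodes[kMaxNodes];

// Mutable side table indexed by the same id: writers contend here,
// leaving the node array clean for readers.
std::atomic<uint32_t> use_counts[kMaxNodes];

uint32_t use_count(uint32_t id) {
  return use_counts[id].load(std::memory_order_relaxed);
}

void inc_use_count(uint32_t id) {
  use_counts[id].fetch_add(1, std::memory_order_relaxed);
}

Note that the handle methods now index by id_ instead of dereferencing node_, which is what lets the counter live outside the node entirely.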
@@ -94,8 +90,6 @@ StackDepotNode::args_type StackDepotNode::load(u32 id) const {
   const uptr *stack_trace = tracePtrs[id];
   if (!stack_trace)
     return {};
-  u32 tag =
-      atomic_load(&tag_and_use_count, memory_order_relaxed) >> kUseCountBits;
   return args_type(stack_trace + 1, *stack_trace, tag);
 }
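Both tracePtrs and useCounts rely on TwoLevelMap's lazy allocation, which is what keeps a side table with a slot for every possible id cheap when few stacks are stored. A rough, single-threaded sketch of such a structure (simplified and hypothetical; the real TwoLevelMap in sanitizer_common handles its own locking and memory mapping):

#include <cstddef>
#include <cstdlib>

// Simplified two-level map: a first level of pointers to lazily allocated
// second-level blocks, so untouched index ranges cost no memory.
template <typename T, size_t kSize1, size_t kSize2>
class SimpleTwoLevelMap {
  T *blocks_[kSize1] = {};

 public:
  T &operator[](size_t i) {
    T *&block = blocks_[i / kSize2];
    if (!block)
      block = static_cast<T *>(calloc(kSize2, sizeof(T)));
    return block[i % kSize2];
  }

  // Only materialized blocks count toward memory usage, which is why
  // allocated() above can sum MemoryUsage() cheaply.
  size_t MemoryUsage() const {
    size_t n = 0;
    for (size_t i = 0; i < kSize1; i++)
      if (blocks_[i])
        n += kSize2 * sizeof(T);
    return n;
  }
};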