@@ -48,7 +48,10 @@ template <typename Config> class SizeClassAllocator64 {
   typedef typename Config::Primary::CompactPtrT CompactPtrT;
   typedef typename Config::Primary::SizeClassMap SizeClassMap;
   static const uptr CompactPtrScale = Config::Primary::CompactPtrScale;
+  static const uptr RegionSizeLog = Config::Primary::RegionSizeLog;
   static const uptr GroupSizeLog = Config::Primary::GroupSizeLog;
+  static_assert(RegionSizeLog >= GroupSizeLog,
+                "Group size shouldn't be greater than the region size");
   static const uptr GroupScale = GroupSizeLog - CompactPtrScale;
   typedef SizeClassAllocator64<Config> ThisT;
   typedef SizeClassAllocatorLocalCache<ThisT> CacheT;
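Note on the new static_assert: RegionSizeLog is hoisted into a class-level constant so the relationship between the two logs can be checked at compile time. A group is the sub-range of a region used to batch free blocks, so it can never be larger than the region that contains it. A minimal standalone sketch of the invariant, using a hypothetical ExampleConfig with illustrative values (not taken from any real scudo config):

#include <cstdint>

using uptr = uintptr_t;

struct ExampleConfig {
  static const uptr RegionSizeLog = 30; // 1 GiB region per size class
  static const uptr GroupSizeLog = 20;  // 1 MiB groups within a region
};

// A group subdivides a region, so the group size log may not exceed the
// region size log; the check fires at compile time for a bad config.
static_assert(ExampleConfig::RegionSizeLog >= ExampleConfig::GroupSizeLog,
              "Group size shouldn't be greater than the region size");

int main() { return 0; }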
@@ -67,7 +70,7 @@ template <typename Config> class SizeClassAllocator64 {
     DCHECK(isAligned(reinterpret_cast<uptr>(this), alignof(ThisT)));

     const uptr PageSize = getPageSizeCached();
-    const uptr GroupSize = (1U << GroupSizeLog);
+    const uptr GroupSize = (1UL << GroupSizeLog);
     const uptr PagesInGroup = GroupSize / PageSize;
     const uptr MinSizeClass = getSizeByClassId(1);
     // When trying to release pages back to memory, visiting smaller size
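The 1U to 1UL change matters once GroupSizeLog may equal RegionSizeLog: region size logs can reach or exceed 32, and shifting a 32-bit 1U by 32 or more is undefined behavior. A small demonstration, assuming an LP64 target where unsigned long is 64 bits (on LLP64 Windows, 1ULL would be the safe spelling):

#include <cstdint>
#include <cstdio>

int main() {
  const unsigned Log = 32; // plausible once GroupSizeLog can match RegionSizeLog
  // uint64_t Bad = 1U << Log;  // undefined behavior: shift count >= width of 1U
  uint64_t Good = 1UL << Log;   // 64-bit shift on LP64: 4 GiB, as intended
  printf("%llu\n", (unsigned long long)Good);
  return 0;
}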
@@ -119,11 +122,10 @@ template <typename Config> class SizeClassAllocator64 {
       RegionInfo *Region = getRegionInfo(I);
       // The actual start of a region is offset by a random number of pages
       // when PrimaryEnableRandomOffset is set.
-      Region->RegionBeg =
-          (PrimaryBase + (I << Config::Primary::RegionSizeLog)) +
-          (Config::Primary::EnableRandomOffset
-               ? ((getRandomModN(&Seed, 16) + 1) * PageSize)
-               : 0);
+      Region->RegionBeg = (PrimaryBase + (I << RegionSizeLog)) +
+                          (Config::Primary::EnableRandomOffset
+                               ? ((getRandomModN(&Seed, 16) + 1) * PageSize)
+                               : 0);
       Region->RandState = getRandomU32(&Seed);
       // Releasing small blocks is expensive, set a higher threshold to avoid
       // frequent page releases.
@@ -134,7 +136,7 @@ template <typename Config> class SizeClassAllocator64 {
       Region->ReleaseInfo.LastReleaseAtNs = Time;

       Region->MemMapInfo.MemMap = ReservedMemory.dispatch(
-          PrimaryBase + (I << Config::Primary::RegionSizeLog), RegionSize);
+          PrimaryBase + (I << RegionSizeLog), RegionSize);
       CHECK(Region->MemMapInfo.MemMap.isAllocated());
     }
     shuffle(RegionInfoArray, NumClasses, &Seed);
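For context, the loop above carves the primary mapping into one region per size class and, when random offsets are enabled, nudges each region's usable start forward by 1 to 16 pages. A sketch of that layout arithmetic, assuming a 64-bit target and illustrative constants (PrimaryBase, RegionSizeLog, and the fixed random page count are made up for the example; the real code draws it from getRandomModN):

#include <cstdint>
#include <cstdio>

using uptr = uintptr_t;

int main() {
  const uptr RegionSizeLog = 30;           // illustrative
  const uptr PageSize = 4096;              // illustrative
  const uptr PrimaryBase = 0x600000000000; // illustrative mapping base
  const bool EnableRandomOffset = true;
  const uptr RandomPages = 7; // stands in for getRandomModN(&Seed, 16) + 1

  for (uptr I = 0; I < 4; ++I) {
    // Region I occupies [PrimaryBase + I * RegionSize, ...); its usable
    // start is pushed forward by 1..16 pages when random offsets are on.
    const uptr RegionBeg = (PrimaryBase + (I << RegionSizeLog)) +
                           (EnableRandomOffset ? RandomPages * PageSize : 0);
    printf("region %zu begins at %#zx\n", (size_t)I, (size_t)RegionBeg);
  }
  return 0;
}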
@@ -271,19 +273,21 @@ template <typename Config> class SizeClassAllocator64 {
     // TODO(chiahungduan): Consider not doing grouping if the group size is not
     // greater than the block size with a certain scale.

-    // Sort the blocks so that blocks belonging to the same group can be pushed
-    // together.
     bool SameGroup = true;
-    for (u32 I = 1; I < Size; ++I) {
-      if (compactPtrGroup(Array[I - 1]) != compactPtrGroup(Array[I]))
-        SameGroup = false;
-      CompactPtrT Cur = Array[I];
-      u32 J = I;
-      while (J > 0 && compactPtrGroup(Cur) < compactPtrGroup(Array[J - 1])) {
-        Array[J] = Array[J - 1];
-        --J;
+    if (GroupSizeLog < RegionSizeLog) {
+      // Sort the blocks so that blocks belonging to the same group can be
+      // pushed together.
+      for (u32 I = 1; I < Size; ++I) {
+        if (compactPtrGroup(Array[I - 1]) != compactPtrGroup(Array[I]))
+          SameGroup = false;
+        CompactPtrT Cur = Array[I];
+        u32 J = I;
+        while (J > 0 && compactPtrGroup(Cur) < compactPtrGroup(Array[J - 1])) {
+          Array[J] = Array[J - 1];
+          --J;
+        }
+        Array[J] = Cur;
       }
-      Array[J] = Cur;
     }

     {
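The guarded sort keeps blocks of the same group adjacent so they can be pushed to their batch groups in runs. Both logs are compile-time constants, so the new condition folds away entirely: when GroupSizeLog equals RegionSizeLog the whole region is a single group, every block trivially shares it, and SameGroup correctly stays true without scanning. A standalone sketch of the sort, where CompactPtrT and the GroupScale value are illustrative stand-ins rather than scudo's real definitions:

#include <cstdint>
#include <cstdio>

typedef uint32_t CompactPtrT;
typedef uint32_t u32;
static const u32 GroupScale = 8; // illustrative: group index = ptr >> GroupScale

static CompactPtrT compactPtrGroup(CompactPtrT P) { return P >> GroupScale; }

// Stable insertion sort of compact pointers keyed by group index.
// Returns true when every block already belonged to a single group.
static bool sortByGroup(CompactPtrT *Array, u32 Size) {
  bool SameGroup = true;
  for (u32 I = 1; I < Size; ++I) {
    if (compactPtrGroup(Array[I - 1]) != compactPtrGroup(Array[I]))
      SameGroup = false;
    CompactPtrT Cur = Array[I];
    u32 J = I;
    while (J > 0 && compactPtrGroup(Cur) < compactPtrGroup(Array[J - 1])) {
      Array[J] = Array[J - 1];
      --J;
    }
    Array[J] = Cur;
  }
  return SameGroup;
}

int main() {
  CompactPtrT Blocks[] = {0x300, 0x100, 0x310, 0x120};
  const bool SameGroup = sortByGroup(Blocks, 4);
  // Groups 3,1,3,1 get reordered to 1,1,3,3; SameGroup reports false.
  printf("same group: %d, first block: %#x\n", (int)SameGroup, Blocks[0]);
  return 0;
}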
@@ -477,7 +481,7 @@ template <typename Config> class SizeClassAllocator64 {
   AtomicOptions Options;

 private:
-  static const uptr RegionSize = 1UL << Config::Primary::RegionSizeLog;
+  static const uptr RegionSize = 1UL << RegionSizeLog;
   static const uptr NumClasses = SizeClassMap::NumClasses;
   static const uptr PrimarySize = RegionSize * NumClasses;

@@ -1125,7 +1129,7 @@ template <typename Config> class SizeClassAllocator64 {
   collectGroupsToRelease(RegionInfo *Region, const uptr BlockSize,
                          const uptr AllocatedUserEnd, const uptr CompactPtrBase)
       REQUIRES(Region->MMLock, Region->FLLock) {
-    const uptr GroupSize = (1U << GroupSizeLog);
+    const uptr GroupSize = (1UL << GroupSizeLog);
     const uptr PageSize = getPageSizeCached();
     SinglyLinkedList<BatchGroup> GroupsToRelease;

@@ -1292,7 +1296,7 @@ template <typename Config> class SizeClassAllocator64 {
                     const uptr AllocatedUserEnd, const uptr CompactPtrBase,
                     SinglyLinkedList<BatchGroup> &GroupsToRelease)
       REQUIRES(Region->MMLock) EXCLUDES(Region->FLLock) {
-    const uptr GroupSize = (1U << GroupSizeLog);
+    const uptr GroupSize = (1UL << GroupSizeLog);
     auto DecompactPtr = [CompactPtrBase](CompactPtrT CompactPtr) {
       return decompactPtrInternal(CompactPtrBase, CompactPtr);
     };