Commit a7130d8

[ADT][NFC] Use empty base optimisation in BumpPtrAllocatorImpl
Most uses of this class just use the default MallocAllocator. As this contains no fields, we can use the empty base optimisation for BumpPtrAllocatorImpl and save 8 bytes of padding for most use cases. This prevents using a class that is marked as `final` as the `AllocatorT` template argument. If one must use an allocator that has been marked as `final`, the simplest way around this is a proxy class. The class should have all the methods that `AllocatorBase` expects and should forward the calls to your own allocator instance.

Reviewed By: dblaikie

Differential Revision: https://reviews.llvm.org/D94439
1 parent: 55f2eee
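As a sketch of the workaround mentioned in the commit message (the names MyFinalAllocator and MyAllocatorProxy below are hypothetical, not part of LLVM or of this change): a proxy allocator provides the Allocate/Deallocate methods that AllocatorBase expects and forwards them to the real, `final` allocator instance. Since the proxy itself is not `final`, BumpPtrAllocatorImpl can derive from it.

```cpp
// Hedged sketch only: illustrates the proxy pattern described in the commit
// message; the class names here are made up for the example.
#include "llvm/Support/Allocator.h"
#include <cstddef>

// An allocator someone has marked `final`; it can no longer be used directly
// as AllocatorT, because BumpPtrAllocatorImpl now inherits from AllocatorT.
class MyFinalAllocator final : public llvm::AllocatorBase<MyFinalAllocator> {
public:
  void *Allocate(size_t Size, size_t Alignment) {
    return llvm::allocate_buffer(Size, Alignment);
  }
  void Deallocate(const void *Ptr, size_t Size, size_t Alignment) {
    llvm::deallocate_buffer(const_cast<void *>(Ptr), Size, Alignment);
  }
  // Pull in the typed overloads provided by AllocatorBase.
  using llvm::AllocatorBase<MyFinalAllocator>::Allocate;
  using llvm::AllocatorBase<MyFinalAllocator>::Deallocate;
};

// A non-final proxy that forwards the calls AllocatorBase expects to your own
// allocator instance, so it can serve as the (private) base of
// BumpPtrAllocatorImpl.
class MyAllocatorProxy : public llvm::AllocatorBase<MyAllocatorProxy> {
  MyFinalAllocator &Impl;

public:
  explicit MyAllocatorProxy(MyFinalAllocator &Impl) : Impl(Impl) {}
  void *Allocate(size_t Size, size_t Alignment) {
    return Impl.Allocate(Size, Alignment);
  }
  void Deallocate(const void *Ptr, size_t Size, size_t Alignment) {
    Impl.Deallocate(Ptr, Size, Alignment);
  }
  using llvm::AllocatorBase<MyAllocatorProxy>::Allocate;
  using llvm::AllocatorBase<MyAllocatorProxy>::Deallocate;
};

// Possible usage:
//   MyFinalAllocator Real;
//   llvm::BumpPtrAllocatorImpl<MyAllocatorProxy> BPA(MyAllocatorProxy(Real));
```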

llvm/include/llvm/Support/Allocator.h (12 additions, 13 deletions)

@@ -66,7 +66,8 @@ template <typename AllocatorT = MallocAllocator, size_t SlabSize = 4096,
           size_t SizeThreshold = SlabSize, size_t GrowthDelay = 128>
 class BumpPtrAllocatorImpl
     : public AllocatorBase<BumpPtrAllocatorImpl<AllocatorT, SlabSize,
-                                                SizeThreshold, GrowthDelay>> {
+                                                SizeThreshold, GrowthDelay>>,
+      private AllocatorT {
 public:
   static_assert(SizeThreshold <= SlabSize,
                 "The SizeThreshold must be at most the SlabSize to ensure "
@@ -80,15 +81,15 @@ class BumpPtrAllocatorImpl
 
   template <typename T>
   BumpPtrAllocatorImpl(T &&Allocator)
-      : Allocator(std::forward<T &&>(Allocator)) {}
+      : AllocatorT(std::forward<T &&>(Allocator)) {}
 
   // Manually implement a move constructor as we must clear the old allocator's
   // slabs as a matter of correctness.
   BumpPtrAllocatorImpl(BumpPtrAllocatorImpl &&Old)
-      : CurPtr(Old.CurPtr), End(Old.End), Slabs(std::move(Old.Slabs)),
+      : AllocatorT(static_cast<AllocatorT &&>(Old)), CurPtr(Old.CurPtr),
+        End(Old.End), Slabs(std::move(Old.Slabs)),
         CustomSizedSlabs(std::move(Old.CustomSizedSlabs)),
-        BytesAllocated(Old.BytesAllocated), RedZoneSize(Old.RedZoneSize),
-        Allocator(std::move(Old.Allocator)) {
+        BytesAllocated(Old.BytesAllocated), RedZoneSize(Old.RedZoneSize) {
     Old.CurPtr = Old.End = nullptr;
     Old.BytesAllocated = 0;
     Old.Slabs.clear();
@@ -110,7 +111,7 @@ class BumpPtrAllocatorImpl
     RedZoneSize = RHS.RedZoneSize;
     Slabs = std::move(RHS.Slabs);
     CustomSizedSlabs = std::move(RHS.CustomSizedSlabs);
-    Allocator = std::move(RHS.Allocator);
+    AllocatorT::operator=(static_cast<AllocatorT &&>(RHS));
 
     RHS.CurPtr = RHS.End = nullptr;
     RHS.BytesAllocated = 0;
@@ -170,7 +171,8 @@ class BumpPtrAllocatorImpl
     // If Size is really big, allocate a separate slab for it.
     size_t PaddedSize = SizeToAllocate + Alignment.value() - 1;
     if (PaddedSize > SizeThreshold) {
-      void *NewSlab = Allocator.Allocate(PaddedSize, alignof(std::max_align_t));
+      void *NewSlab =
+          AllocatorT::Allocate(PaddedSize, alignof(std::max_align_t));
       // We own the new slab and don't want anyone reading anyting other than
       // pieces returned from this method. So poison the whole slab.
       __asan_poison_memory_region(NewSlab, PaddedSize);
@@ -315,9 +317,6 @@ class BumpPtrAllocatorImpl
   /// a sanitizer.
   size_t RedZoneSize = 1;
 
-  /// The allocator instance we use to get slabs of memory.
-  AllocatorT Allocator;
-
   static size_t computeSlabSize(unsigned SlabIdx) {
     // Scale the actual allocated slab size based on the number of slabs
     // allocated. Every GrowthDelay slabs allocated, we double
@@ -333,7 +332,7 @@ class BumpPtrAllocatorImpl
     size_t AllocatedSlabSize = computeSlabSize(Slabs.size());
 
     void *NewSlab =
-        Allocator.Allocate(AllocatedSlabSize, alignof(std::max_align_t));
+        AllocatorT::Allocate(AllocatedSlabSize, alignof(std::max_align_t));
     // We own the new slab and don't want anyone reading anything other than
     // pieces returned from this method. So poison the whole slab.
     __asan_poison_memory_region(NewSlab, AllocatedSlabSize);
@@ -349,7 +348,7 @@ class BumpPtrAllocatorImpl
     for (; I != E; ++I) {
       size_t AllocatedSlabSize =
          computeSlabSize(std::distance(Slabs.begin(), I));
-      Allocator.Deallocate(*I, AllocatedSlabSize, alignof(std::max_align_t));
+      AllocatorT::Deallocate(*I, AllocatedSlabSize, alignof(std::max_align_t));
     }
   }
 
@@ -358,7 +357,7 @@ class BumpPtrAllocatorImpl
     for (auto &PtrAndSize : CustomSizedSlabs) {
      void *Ptr = PtrAndSize.first;
      size_t Size = PtrAndSize.second;
-     Allocator.Deallocate(Ptr, Size, alignof(std::max_align_t));
+     AllocatorT::Deallocate(Ptr, Size, alignof(std::max_align_t));
    }
  }
 
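The 8-byte saving claimed in the commit message can be illustrated with a standalone sketch (assuming a typical 64-bit ABI; the Before/After structs below are stand-ins for the old and new layout, not the real class): an empty data member must still occupy at least one byte and gets padded to the surrounding alignment, whereas an empty private base class is permitted to occupy no storage at all.

```cpp
// Minimal, self-contained sketch of the empty base optimisation (EBO).
// Before/After are illustrative stand-ins; reported sizes assume a typical
// 64-bit ABI where pointers and size_t are 8 bytes.
#include <cstddef>
#include <cstdio>

struct Empty {}; // plays the role of MallocAllocator: no data members

struct Before {          // old layout: empty allocator stored as a member
  void *CurPtr = nullptr;
  void *End = nullptr;
  size_t BytesAllocated = 0;
  Empty Allocator;       // occupies 1 byte, padded to 8 => sizeof is 32
};

struct After : private Empty { // new layout: empty allocator as a base
  void *CurPtr = nullptr;
  void *End = nullptr;
  size_t BytesAllocated = 0;   // base contributes no storage => sizeof is 24
};

int main() {
  std::printf("member: %zu bytes, empty base: %zu bytes\n",
              sizeof(Before), sizeof(After));
}
```

With C++20 a `[[no_unique_address]]` member could achieve the same saving; deriving privately from the allocator is the route available under the older language standard LLVM required at the time of this commit.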
