
Commit a565509

[ADT] Use Empty Base Optimization for Allocators

In D94439, BumpPtrAllocator changed its implementation to use an empty base optimization for the underlying allocator. This patch builds on that by extending the same technique to more classes and by allowing the underlying allocator to be a reference type, which was not previously possible because you cannot derive from a reference.

The main beneficiary is StringMap, which usually uses the default MallocAllocator yet previously paid the size of a pointer for it.

Reviewed By: dblaikie

Differential Revision: https://reviews.llvm.org/D129206
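
To make the saving concrete, here is a minimal standalone sketch (not part of the patch; the type names are illustrative) of why holding a stateless allocator as an empty base is cheaper than holding it as a member:

struct StatelessAlloc {}; // stands in for an empty allocator like MallocAllocator

// Holding the empty allocator as a member costs at least one byte, which
// alignment then pads out next to the pointer.
struct HoldsMember {
  StatelessAlloc A;
  void *Ptr;
};

// Deriving from the empty allocator lets the empty base optimization apply,
// so the allocator contributes no storage at all.
struct HoldsBase : StatelessAlloc {
  void *Ptr;
};

static_assert(sizeof(HoldsBase) < sizeof(HoldsMember),
              "the empty-base layout is strictly smaller");

detail::AllocatorHolder (added below in AllocatorBase.h) packages this pattern so containers do not derive from the allocator directly, and its reference specialization covers the case an empty base cannot.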

5 files changed (+59, -31 lines)

5 files changed

+59
-31
lines changed

llvm/include/llvm/ADT/ScopedHashTable.h

Lines changed: 6 additions & 7 deletions
@@ -147,7 +147,9 @@ class ScopedHashTableIterator {
 };
 
 template <typename K, typename V, typename KInfo, typename AllocatorTy>
-class ScopedHashTable {
+class ScopedHashTable : detail::AllocatorHolder<AllocatorTy> {
+  using AllocTy = detail::AllocatorHolder<AllocatorTy>;
+
 public:
   /// ScopeTy - This is a helpful typedef that allows clients to get easy access
   /// to the name of the scope for this hash table.
@@ -162,11 +164,9 @@ class ScopedHashTable {
   DenseMap<K, ValTy*, KInfo> TopLevelMap;
   ScopeTy *CurScope = nullptr;
 
-  AllocatorTy Allocator;
-
 public:
   ScopedHashTable() = default;
-  ScopedHashTable(AllocatorTy A) : Allocator(A) {}
+  ScopedHashTable(AllocatorTy A) : AllocTy(A) {}
   ScopedHashTable(const ScopedHashTable &) = delete;
   ScopedHashTable &operator=(const ScopedHashTable &) = delete;
 
@@ -175,8 +175,7 @@ class ScopedHashTable {
   }
 
   /// Access to the allocator.
-  AllocatorTy &getAllocator() { return Allocator; }
-  const AllocatorTy &getAllocator() const { return Allocator; }
+  using AllocTy::getAllocator;
 
   /// Return 1 if the specified key is in the table, 0 otherwise.
   size_type count(const K &Key) const {
@@ -217,7 +216,7 @@
     assert(S && "No scope active!");
     ScopedHashTableVal<K, V> *&KeyEntry = TopLevelMap[Key];
     KeyEntry = ValTy::Create(S->getLastValInScope(), KeyEntry, Key, Val,
-                             Allocator);
+                             getAllocator());
     S->setLastValInScope(KeyEntry);
   }
 };
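
Client code is unaffected by the switch to the AllocatorHolder base; a quick usage sketch (not part of the commit, assuming the usual default template arguments):

#include "llvm/ADT/ScopedHashTable.h"

using namespace llvm;

void scopedHashTableUsage() {
  // Still defaults to MallocAllocator; the allocator is now an empty base
  // instead of a data member.
  ScopedHashTable<int, int> Table;
  {
    ScopedHashTable<int, int>::ScopeTy Scope(Table);
    Table.insert(1, 10);
    // getAllocator() is now inherited via `using AllocTy::getAllocator;`
    // but behaves exactly as before.
    (void)Table.getAllocator();
  }
  // Insertions made inside the scope are removed when the scope ends.
}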

llvm/include/llvm/ADT/StringMap.h

Lines changed: 15 additions & 15 deletions
@@ -107,8 +107,9 @@ class StringMapImpl {
 /// funky memory allocation and hashing things to make it extremely efficient,
 /// storing the string data *after* the value in the map.
 template <typename ValueTy, typename AllocatorTy = MallocAllocator>
-class StringMap : public StringMapImpl {
-  AllocatorTy Allocator;
+class StringMap : public StringMapImpl,
+                  private detail::AllocatorHolder<AllocatorTy> {
+  using AllocTy = detail::AllocatorHolder<AllocatorTy>;
 
 public:
   using MapEntryTy = StringMapEntry<ValueTy>;
@@ -119,24 +120,23 @@ class StringMap : public StringMapImpl {
       : StringMapImpl(InitialSize, static_cast<unsigned>(sizeof(MapEntryTy))) {}
 
   explicit StringMap(AllocatorTy A)
-      : StringMapImpl(static_cast<unsigned>(sizeof(MapEntryTy))), Allocator(A) {
-  }
+      : StringMapImpl(static_cast<unsigned>(sizeof(MapEntryTy))), AllocTy(A) {}
 
   StringMap(unsigned InitialSize, AllocatorTy A)
       : StringMapImpl(InitialSize, static_cast<unsigned>(sizeof(MapEntryTy))),
-        Allocator(A) {}
+        AllocTy(A) {}
 
   StringMap(std::initializer_list<std::pair<StringRef, ValueTy>> List)
       : StringMapImpl(List.size(), static_cast<unsigned>(sizeof(MapEntryTy))) {
     insert(List);
   }
 
   StringMap(StringMap &&RHS)
-      : StringMapImpl(std::move(RHS)), Allocator(std::move(RHS.Allocator)) {}
+      : StringMapImpl(std::move(RHS)), AllocTy(std::move(RHS.getAllocator())) {}
 
   StringMap(const StringMap &RHS)
       : StringMapImpl(static_cast<unsigned>(sizeof(MapEntryTy))),
-        Allocator(RHS.Allocator) {
+        AllocTy(RHS.getAllocator()) {
     if (RHS.empty())
       return;
 
@@ -156,7 +156,7 @@ class StringMap : public StringMapImpl {
       }
 
       TheTable[I] = MapEntryTy::Create(
-          static_cast<MapEntryTy *>(Bucket)->getKey(), Allocator,
+          static_cast<MapEntryTy *>(Bucket)->getKey(), getAllocator(),
           static_cast<MapEntryTy *>(Bucket)->getValue());
       HashTable[I] = RHSHashTable[I];
     }
@@ -171,7 +171,7 @@ class StringMap : public StringMapImpl {
 
   StringMap &operator=(StringMap RHS) {
     StringMapImpl::swap(RHS);
-    std::swap(Allocator, RHS.Allocator);
+    std::swap(getAllocator(), RHS.getAllocator());
     return *this;
   }
 
@@ -183,15 +183,14 @@ class StringMap : public StringMapImpl {
      for (unsigned I = 0, E = NumBuckets; I != E; ++I) {
        StringMapEntryBase *Bucket = TheTable[I];
        if (Bucket && Bucket != getTombstoneVal()) {
-          static_cast<MapEntryTy *>(Bucket)->Destroy(Allocator);
+          static_cast<MapEntryTy *>(Bucket)->Destroy(getAllocator());
        }
      }
    }
    free(TheTable);
  }
 
-  AllocatorTy &getAllocator() { return Allocator; }
-  const AllocatorTy &getAllocator() const { return Allocator; }
+  using AllocTy::getAllocator;
 
  using key_type = const char *;
  using mapped_type = ValueTy;
@@ -336,7 +335,8 @@ class StringMap : public StringMapImpl {
 
    if (Bucket == getTombstoneVal())
      --NumTombstones;
-    Bucket = MapEntryTy::Create(Key, Allocator, std::forward<ArgsTy>(Args)...);
+    Bucket =
+        MapEntryTy::Create(Key, getAllocator(), std::forward<ArgsTy>(Args)...);
    ++NumItems;
    assert(NumItems + NumTombstones <= NumBuckets);
 
@@ -354,7 +354,7 @@ class StringMap : public StringMapImpl {
    for (unsigned I = 0, E = NumBuckets; I != E; ++I) {
      StringMapEntryBase *&Bucket = TheTable[I];
      if (Bucket && Bucket != getTombstoneVal()) {
-        static_cast<MapEntryTy *>(Bucket)->Destroy(Allocator);
+        static_cast<MapEntryTy *>(Bucket)->Destroy(getAllocator());
      }
      Bucket = nullptr;
    }
@@ -370,7 +370,7 @@ class StringMap : public StringMapImpl {
  void erase(iterator I) {
    MapEntryTy &V = *I;
    remove(&V);
-    V.Destroy(Allocator);
+    V.Destroy(getAllocator());
  }
 
  bool erase(StringRef Key) {
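
A short usage sketch (not part of the commit; names are illustrative) of the two allocator modes StringMap now supports without wasted storage:

#include "llvm/ADT/StringMap.h"
#include "llvm/Support/Allocator.h"

using namespace llvm;

void stringMapAllocators() {
  // Default MallocAllocator: the AllocatorHolder base is empty, so the map
  // no longer carries a pointer-sized slot for it.
  StringMap<int> Counts;
  Counts["hello"] = 1;

  // Reference allocator: AllocatorHolder<BumpPtrAllocator &> stores an actual
  // reference, so several maps can share one arena's slabs.
  BumpPtrAllocator Arena;
  StringMap<int, BumpPtrAllocator &> Shared(Arena);
  Shared["world"] = 2;
}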

llvm/include/llvm/Support/Allocator.h

Lines changed: 12 additions & 9 deletions
@@ -63,7 +63,9 @@ template <typename AllocatorT = MallocAllocator, size_t SlabSize = 4096,
 class BumpPtrAllocatorImpl
     : public AllocatorBase<BumpPtrAllocatorImpl<AllocatorT, SlabSize,
                                                 SizeThreshold, GrowthDelay>>,
-      private AllocatorT {
+      private detail::AllocatorHolder<AllocatorT> {
+  using AllocTy = detail::AllocatorHolder<AllocatorT>;
+
 public:
   static_assert(SizeThreshold <= SlabSize,
                 "The SizeThreshold must be at most the SlabSize to ensure "
@@ -77,12 +79,12 @@ class BumpPtrAllocatorImpl
 
   template <typename T>
   BumpPtrAllocatorImpl(T &&Allocator)
-      : AllocatorT(std::forward<T &&>(Allocator)) {}
+      : AllocTy(std::forward<T &&>(Allocator)) {}
 
   // Manually implement a move constructor as we must clear the old allocator's
   // slabs as a matter of correctness.
   BumpPtrAllocatorImpl(BumpPtrAllocatorImpl &&Old)
-      : AllocatorT(static_cast<AllocatorT &&>(Old)), CurPtr(Old.CurPtr),
+      : AllocTy(std::move(Old.getAllocator())), CurPtr(Old.CurPtr),
        End(Old.End), Slabs(std::move(Old.Slabs)),
        CustomSizedSlabs(std::move(Old.CustomSizedSlabs)),
        BytesAllocated(Old.BytesAllocated), RedZoneSize(Old.RedZoneSize) {
@@ -107,7 +109,7 @@ class BumpPtrAllocatorImpl
    RedZoneSize = RHS.RedZoneSize;
    Slabs = std::move(RHS.Slabs);
    CustomSizedSlabs = std::move(RHS.CustomSizedSlabs);
-    AllocatorT::operator=(static_cast<AllocatorT &&>(RHS));
+    AllocTy::operator=(std::move(RHS.getAllocator()));
 
    RHS.CurPtr = RHS.End = nullptr;
    RHS.BytesAllocated = 0;
@@ -175,7 +177,7 @@ class BumpPtrAllocatorImpl
    size_t PaddedSize = SizeToAllocate + Alignment.value() - 1;
    if (PaddedSize > SizeThreshold) {
      void *NewSlab =
-          AllocatorT::Allocate(PaddedSize, alignof(std::max_align_t));
+          this->getAllocator().Allocate(PaddedSize, alignof(std::max_align_t));
      // We own the new slab and don't want anyone reading anyting other than
      // pieces returned from this method. So poison the whole slab.
      __asan_poison_memory_region(NewSlab, PaddedSize);
@@ -334,8 +336,8 @@ class BumpPtrAllocatorImpl
  void StartNewSlab() {
    size_t AllocatedSlabSize = computeSlabSize(Slabs.size());
 
-    void *NewSlab =
-        AllocatorT::Allocate(AllocatedSlabSize, alignof(std::max_align_t));
+    void *NewSlab = this->getAllocator().Allocate(AllocatedSlabSize,
+                                                  alignof(std::max_align_t));
    // We own the new slab and don't want anyone reading anything other than
    // pieces returned from this method. So poison the whole slab.
    __asan_poison_memory_region(NewSlab, AllocatedSlabSize);
@@ -351,7 +353,8 @@ class BumpPtrAllocatorImpl
    for (; I != E; ++I) {
      size_t AllocatedSlabSize =
          computeSlabSize(std::distance(Slabs.begin(), I));
-      AllocatorT::Deallocate(*I, AllocatedSlabSize, alignof(std::max_align_t));
+      this->getAllocator().Deallocate(*I, AllocatedSlabSize,
+                                      alignof(std::max_align_t));
    }
  }
 
@@ -360,7 +363,7 @@ class BumpPtrAllocatorImpl
    for (auto &PtrAndSize : CustomSizedSlabs) {
      void *Ptr = PtrAndSize.first;
      size_t Size = PtrAndSize.second;
-      AllocatorT::Deallocate(Ptr, Size, alignof(std::max_align_t));
+      this->getAllocator().Deallocate(Ptr, Size, alignof(std::max_align_t));
    }
  }
 
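
To show what now routes through this->getAllocator(), here is a hedged sketch of a hypothetical counting allocator (not part of LLVM; the names are illustrative) plugged in as the underlying allocator of BumpPtrAllocatorImpl:

#include "llvm/Support/Allocator.h"
#include <cstdlib>

using namespace llvm;

// Counts how many slabs the bump allocator requests from its underlying
// allocator; illustrative only.
class CountingAllocator : public AllocatorBase<CountingAllocator> {
public:
  unsigned Slabs = 0;

  void *Allocate(size_t Size, size_t /*Alignment*/) {
    ++Slabs; // one call per slab, reached via this->getAllocator().Allocate(...)
    return std::malloc(Size);
  }
  // Pull in the templated convenience overloads from AllocatorBase.
  using AllocatorBase<CountingAllocator>::Allocate;

  void Deallocate(const void *Ptr, size_t /*Size*/, size_t /*Alignment*/) {
    std::free(const_cast<void *>(Ptr));
  }
  using AllocatorBase<CountingAllocator>::Deallocate;
};

void countSlabs() {
  BumpPtrAllocatorImpl<CountingAllocator> BPA;
  (void)BPA.Allocate(128, 8); // first allocation triggers StartNewSlab()
}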

llvm/include/llvm/Support/AllocatorBase.h

Lines changed: 22 additions & 0 deletions
@@ -99,6 +99,28 @@ class MallocAllocator : public AllocatorBase<MallocAllocator> {
  void PrintStats() const {}
 };
 
+namespace detail {
+
+template <typename Alloc> class AllocatorHolder : Alloc {
+public:
+  AllocatorHolder() = default;
+  AllocatorHolder(const Alloc &A) : Alloc(A) {}
+  AllocatorHolder(Alloc &&A) : Alloc(static_cast<Alloc &&>(A)) {}
+  Alloc &getAllocator() { return *this; }
+  const Alloc &getAllocator() const { return *this; }
+};
+
+template <typename Alloc> class AllocatorHolder<Alloc &> {
+  Alloc &A;
+
+public:
+  AllocatorHolder(Alloc &A) : A(A) {}
+  Alloc &getAllocator() { return A; }
+  const Alloc &getAllocator() const { return A; }
+};
+
+} // namespace detail
+
 } // namespace llvm
 
 #endif // LLVM_SUPPORT_ALLOCATORBASE_H
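
The new helper is generic enough to reuse elsewhere; a hypothetical adopter (MyPool and allocateNode are illustrative names, not part of the patch) would follow the same pattern as the containers above:

#include "llvm/Support/AllocatorBase.h"
#include <cstddef>

template <typename AllocatorTy = llvm::MallocAllocator>
class MyPool : llvm::detail::AllocatorHolder<AllocatorTy> {
  using AllocTy = llvm::detail::AllocatorHolder<AllocatorTy>;

public:
  MyPool() = default;
  MyPool(AllocatorTy A) : AllocTy(A) {}

  // Re-export the accessor instead of writing it against a stored member.
  using AllocTy::getAllocator;

  void *allocateNode(size_t Size) {
    // Works the same whether AllocatorTy is a value, an empty type, or a
    // reference such as SomeAllocator &.
    return this->getAllocator().Allocate(Size, alignof(std::max_align_t));
  }
};

When AllocatorTy is the empty MallocAllocator, the holder base occupies no storage and MyPool is just the size of its own members.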

llvm/unittests/ADT/StringMapTest.cpp

Lines changed: 4 additions & 0 deletions
@@ -17,6 +17,10 @@ using namespace llvm;
 
 namespace {
 
+static_assert(sizeof(StringMap<uint32_t>) <
+                  sizeof(StringMap<uint32_t, MallocAllocator &>),
+              "Ensure empty base optimization happens with default allocator");
+
 // Test fixture
 class StringMapTest : public testing::Test {
 protected:
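
A companion test along these lines (hypothetical, not part of this commit) could further check that a reference-allocator StringMap really draws its entries from the shared arena:

// Assumes "llvm/Support/Allocator.h" is included at the top of the test file.
TEST(StringMapCustomTest, SharedBumpPtrAllocatorSketch) {
  BumpPtrAllocator Arena;
  StringMap<int, BumpPtrAllocator &> Map(Arena);
  Map["key"] = 42; // the entry's storage comes out of Arena
  EXPECT_GT(Arena.getBytesAllocated(), 0u);
}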
