Commit 4b18996

[scudo] Use MemMap in BufferPool and RegionPageMap (#66788)
1 parent 17d276a commit 4b18996

File tree: 2 files changed (+64, -66 lines)

  compiler-rt/lib/scudo/standalone/release.h
  compiler-rt/lib/scudo/standalone/tests/release_test.cpp

compiler-rt/lib/scudo/standalone/release.h

Lines changed: 56 additions & 58 deletions
@@ -109,9 +109,22 @@ class BufferPool {
                     SCUDO_CACHE_LINE_SIZE),
                 "");
 
+  struct Buffer {
+    // Pointer to the buffer's memory, or nullptr if no buffer was allocated.
+    uptr *Data = nullptr;
+
+    // The index of the underlying static buffer, or StaticBufferCount if this
+    // buffer was dynamically allocated. This value is initially set to a poison
+    // value to aid debugging.
+    uptr BufferIndex = ~static_cast<uptr>(0);
+
+    // Only valid if BufferIndex == StaticBufferCount.
+    MemMapT MemMap = {};
+  };
+
   // Return a zero-initialized buffer which can contain at least the given
   // number of elements, or nullptr on failure.
-  uptr *getBuffer(const uptr NumElements) {
+  Buffer getBuffer(const uptr NumElements) {
     if (UNLIKELY(NumElements > StaticBufferNumElements))
       return getDynamicBuffer(NumElements);
 
@@ -130,53 +143,33 @@ class BufferPool {
     if (index >= StaticBufferCount)
       return getDynamicBuffer(NumElements);
 
-    const uptr Offset = index * StaticBufferNumElements;
-    memset(&RawBuffer[Offset], 0, StaticBufferNumElements * sizeof(uptr));
-    return &RawBuffer[Offset];
+    Buffer Buf;
+    Buf.Data = &RawBuffer[index * StaticBufferNumElements];
+    Buf.BufferIndex = index;
+    memset(Buf.Data, 0, StaticBufferNumElements * sizeof(uptr));
+    return Buf;
   }
 
-  void releaseBuffer(uptr *Buffer, const uptr NumElements) {
-    const uptr index = getStaticBufferIndex(Buffer, NumElements);
-    if (index < StaticBufferCount) {
+  void releaseBuffer(Buffer Buf) {
+    DCHECK_NE(Buf.Data, nullptr);
+    DCHECK_LE(Buf.BufferIndex, StaticBufferCount);
+    if (Buf.BufferIndex != StaticBufferCount) {
       ScopedLock L(Mutex);
-      DCHECK_EQ((Mask & (static_cast<uptr>(1) << index)), 0U);
-      Mask |= static_cast<uptr>(1) << index;
+      DCHECK_EQ((Mask & (static_cast<uptr>(1) << Buf.BufferIndex)), 0U);
+      Mask |= static_cast<uptr>(1) << Buf.BufferIndex;
     } else {
-      const uptr MappedSize =
-          roundUp(NumElements * sizeof(uptr), getPageSizeCached());
-      unmap(reinterpret_cast<void *>(Buffer), MappedSize);
+      Buf.MemMap.unmap(Buf.MemMap.getBase(), Buf.MemMap.getCapacity());
     }
   }
 
-  bool isStaticBufferTestOnly(uptr *Buffer, uptr NumElements) {
-    return getStaticBufferIndex(Buffer, NumElements) < StaticBufferCount;
+  bool isStaticBufferTestOnly(const Buffer &Buf) {
+    DCHECK_NE(Buf.Data, nullptr);
+    DCHECK_LE(Buf.BufferIndex, StaticBufferCount);
+    return Buf.BufferIndex != StaticBufferCount;
   }
 
 private:
-  uptr getStaticBufferIndex(uptr *Buffer, uptr NumElements) {
-    if (UNLIKELY(NumElements > StaticBufferNumElements))
-      return StaticBufferCount;
-
-    const uptr BufferBase = reinterpret_cast<uptr>(Buffer);
-    const uptr RawBufferBase = reinterpret_cast<uptr>(RawBuffer);
-
-    if (BufferBase < RawBufferBase ||
-        BufferBase >= RawBufferBase + sizeof(RawBuffer)) {
-      return StaticBufferCount;
-    }
-
-    DCHECK_LE(NumElements, StaticBufferNumElements);
-    DCHECK_LE(BufferBase + NumElements * sizeof(uptr),
-              RawBufferBase + sizeof(RawBuffer));
-
-    const uptr StaticBufferSize = StaticBufferNumElements * sizeof(uptr);
-    DCHECK_EQ((BufferBase - RawBufferBase) % StaticBufferSize, 0U);
-    const uptr index = (BufferBase - RawBufferBase) / StaticBufferSize;
-    DCHECK_LT(index, StaticBufferCount);
-    return index;
-  }
-
-  uptr *getDynamicBuffer(const uptr NumElements) {
+  Buffer getDynamicBuffer(const uptr NumElements) {
     // When using a heap-based buffer, precommit the pages backing the
     // Vmar by passing |MAP_PRECOMMIT| flag. This allows an optimization
     // where page fault exceptions are skipped as the allocated memory
@@ -185,15 +178,18 @@ class BufferPool {
     const uptr MmapFlags = MAP_ALLOWNOMEM | (SCUDO_FUCHSIA ? MAP_PRECOMMIT : 0);
     const uptr MappedSize =
        roundUp(NumElements * sizeof(uptr), getPageSizeCached());
-    return reinterpret_cast<uptr *>(
-        map(nullptr, MappedSize, "scudo:counters", MmapFlags, &MapData));
+    Buffer Buf;
+    if (Buf.MemMap.map(/*Addr=*/0, MappedSize, "scudo:counters", MmapFlags)) {
+      Buf.Data = reinterpret_cast<uptr *>(Buf.MemMap.getBase());
+      Buf.BufferIndex = StaticBufferCount;
+    }
+    return Buf;
   }
 
   HybridMutex Mutex;
   // '1' means that buffer index is not used. '0' means the buffer is in use.
   uptr Mask GUARDED_BY(Mutex) = ~static_cast<uptr>(0);
   uptr RawBuffer[StaticBufferCount * StaticBufferNumElements] GUARDED_BY(Mutex);
-  [[no_unique_address]] MapPlatformData MapData = {};
 };
 
 // A Region page map is used to record the usage of pages in the regions. It
@@ -210,15 +206,15 @@ class RegionPageMap {
   RegionPageMap()
       : Regions(0), NumCounters(0), CounterSizeBitsLog(0), CounterMask(0),
         PackingRatioLog(0), BitOffsetMask(0), SizePerRegion(0),
-        BufferNumElements(0), Buffer(nullptr) {}
+        BufferNumElements(0) {}
   RegionPageMap(uptr NumberOfRegions, uptr CountersPerRegion, uptr MaxValue) {
     reset(NumberOfRegions, CountersPerRegion, MaxValue);
   }
   ~RegionPageMap() {
     if (!isAllocated())
       return;
-    Buffers.releaseBuffer(Buffer, BufferNumElements);
-    Buffer = nullptr;
+    Buffers.releaseBuffer(Buffer);
+    Buffer = {};
   }
 
   // Lock of `StaticBuffer` is acquired conditionally and there's no easy way to
@@ -233,7 +229,7 @@ class RegionPageMap {
     Regions = NumberOfRegion;
     NumCounters = CountersPerRegion;
 
-    constexpr uptr MaxCounterBits = sizeof(*Buffer) * 8UL;
+    constexpr uptr MaxCounterBits = sizeof(*Buffer.Data) * 8UL;
     // Rounding counter storage size up to the power of two allows for using
     // bit shifts calculating particular counter's Index and offset.
     const uptr CounterSizeBits =
@@ -254,7 +250,7 @@ class RegionPageMap {
     Buffer = Buffers.getBuffer(BufferNumElements);
   }
 
-  bool isAllocated() const { return !!Buffer; }
+  bool isAllocated() const { return Buffer.Data != nullptr; }
 
   uptr getCount() const { return NumCounters; }
 
@@ -263,7 +259,8 @@ class RegionPageMap {
     DCHECK_LT(I, NumCounters);
     const uptr Index = I >> PackingRatioLog;
     const uptr BitOffset = (I & BitOffsetMask) << CounterSizeBitsLog;
-    return (Buffer[Region * SizePerRegion + Index] >> BitOffset) & CounterMask;
+    return (Buffer.Data[Region * SizePerRegion + Index] >> BitOffset) &
+           CounterMask;
   }
 
   void inc(uptr Region, uptr I) const {
@@ -272,8 +269,8 @@ class RegionPageMap {
     const uptr BitOffset = (I & BitOffsetMask) << CounterSizeBitsLog;
     DCHECK_LT(BitOffset, SCUDO_WORDSIZE);
     DCHECK_EQ(isAllCounted(Region, I), false);
-    Buffer[Region * SizePerRegion + Index] += static_cast<uptr>(1U)
-                                              << BitOffset;
+    Buffer.Data[Region * SizePerRegion + Index] += static_cast<uptr>(1U)
+                                                   << BitOffset;
   }
 
   void incN(uptr Region, uptr I, uptr N) const {
@@ -284,7 +281,7 @@ class RegionPageMap {
     const uptr BitOffset = (I & BitOffsetMask) << CounterSizeBitsLog;
     DCHECK_LT(BitOffset, SCUDO_WORDSIZE);
     DCHECK_EQ(isAllCounted(Region, I), false);
-    Buffer[Region * SizePerRegion + Index] += N << BitOffset;
+    Buffer.Data[Region * SizePerRegion + Index] += N << BitOffset;
   }
 
   void incRange(uptr Region, uptr From, uptr To) const {
@@ -303,7 +300,7 @@ class RegionPageMap {
     const uptr Index = I >> PackingRatioLog;
     const uptr BitOffset = (I & BitOffsetMask) << CounterSizeBitsLog;
     DCHECK_LT(BitOffset, SCUDO_WORDSIZE);
-    Buffer[Region * SizePerRegion + Index] |= CounterMask << BitOffset;
+    Buffer.Data[Region * SizePerRegion + Index] |= CounterMask << BitOffset;
   }
   void setAsAllCountedRange(uptr Region, uptr From, uptr To) const {
     DCHECK_LE(From, To);
@@ -329,6 +326,13 @@ class RegionPageMap {
   uptr getBufferNumElements() const { return BufferNumElements; }
 
 private:
+  // We may consider making this configurable if there are cases which may
+  // benefit from this.
+  static const uptr StaticBufferCount = 2U;
+  static const uptr StaticBufferNumElements = 512U;
+  using BufferPoolT = BufferPool<StaticBufferCount, StaticBufferNumElements>;
+  static BufferPoolT Buffers;
+
   uptr Regions;
   uptr NumCounters;
   uptr CounterSizeBitsLog;
@@ -338,13 +342,7 @@ class RegionPageMap {
 
   uptr SizePerRegion;
   uptr BufferNumElements;
-  uptr *Buffer;
-
-  // We may consider making this configurable if there are cases which may
-  // benefit from this.
-  static const uptr StaticBufferCount = 2U;
-  static const uptr StaticBufferNumElements = 512U;
-  static BufferPool<StaticBufferCount, StaticBufferNumElements> Buffers;
+  BufferPoolT::Buffer Buffer;
 };
 
 template <class ReleaseRecorderT> class FreePagesRangeTracker {
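
For reference, a minimal usage sketch of the reworked BufferPool API, distilled from the hunks above. It is not part of the commit; it assumes scudo's standalone release.h (and its dependencies) is on the include path, the helper function name is illustrative, and the template parameters simply mirror RegionPageMap's StaticBufferCount/StaticBufferNumElements constants.

// Illustrative only; not part of this commit. Assumes scudo's standalone
// release.h and its dependencies are available on the include path.
#include "release.h"

#include <memory>

static void useBufferPool() {
  // Template parameters mirror RegionPageMap's StaticBufferCount and
  // StaticBufferNumElements constants from the hunks above.
  using Pool = scudo::BufferPool<2U, 512U>;
  std::unique_ptr<Pool> P(new Pool());

  // getBuffer() now returns a Buffer value instead of a raw uptr pointer;
  // Buf.Data stays nullptr if a dynamic mapping could not be created.
  Pool::Buffer Buf = P->getBuffer(/*NumElements=*/512U);
  if (Buf.Data == nullptr)
    return;

  Buf.Data[0] = 42; // The returned storage is zero-initialized.

  // releaseBuffer() takes the Buffer by value: a static buffer is returned
  // to the pool, a dynamic one is unmapped through Buf.MemMap.
  P->releaseBuffer(Buf);
}

The updated unit test below exercises the same getBuffer/isStaticBufferTestOnly/releaseBuffer sequence against the static-buffer path.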

compiler-rt/lib/scudo/standalone/tests/release_test.cpp

Lines changed: 8 additions & 8 deletions
@@ -637,18 +637,18 @@ TEST(ScudoReleaseTest, BufferPool) {
       scudo::BufferPool<StaticBufferCount, StaticBufferNumElements>;
   std::unique_ptr<BufferPool> Pool(new BufferPool());
 
-  std::vector<std::pair<scudo::uptr *, scudo::uptr>> Buffers;
+  std::vector<BufferPool::Buffer> Buffers;
   for (scudo::uptr I = 0; I < StaticBufferCount; ++I) {
-    scudo::uptr *P = Pool->getBuffer(StaticBufferNumElements);
-    EXPECT_TRUE(Pool->isStaticBufferTestOnly(P, StaticBufferNumElements));
-    Buffers.emplace_back(P, StaticBufferNumElements);
+    BufferPool::Buffer Buffer = Pool->getBuffer(StaticBufferNumElements);
+    EXPECT_TRUE(Pool->isStaticBufferTestOnly(Buffer));
+    Buffers.push_back(Buffer);
   }
 
   // The static buffer is supposed to be used up.
-  scudo::uptr *P = Pool->getBuffer(StaticBufferNumElements);
-  EXPECT_FALSE(Pool->isStaticBufferTestOnly(P, StaticBufferNumElements));
+  BufferPool::Buffer Buffer = Pool->getBuffer(StaticBufferNumElements);
+  EXPECT_FALSE(Pool->isStaticBufferTestOnly(Buffer));
 
-  Pool->releaseBuffer(P, StaticBufferNumElements);
+  Pool->releaseBuffer(Buffer);
   for (auto &Buffer : Buffers)
-    Pool->releaseBuffer(Buffer.first, Buffer.second);
+    Pool->releaseBuffer(Buffer);
 }
