Skip to content

Commit d59370d

Browse files
committed
[scudo] Split the code paths in quarantineOrDeallocateChunk()
Move the block-untagging logic into a separate function (unTagBlock) and sink the untagged-pointer computation into quarantineOrDeallocateChunk(), which now receives the header-tagged pointer from its callers.
1 parent 5b56688 commit d59370d

File tree

1 file changed

+51
-27
lines changed

1 file changed

+51
-27
lines changed

compiler-rt/lib/scudo/standalone/combined.h

Lines changed: 51 additions & 27 deletions
Original file line number | Diff line number | Diff line change
@@ -468,7 +468,8 @@ class Allocator {
468468
reportDeleteSizeMismatch(Ptr, DeleteSize, Size);
469469
}
470470

471-
quarantineOrDeallocateChunk(Options, TaggedPtr, &Header, Size);
471+
quarantineOrDeallocateChunk(Options, TaggedPtr, /*HeaderTaggedPtr=*/Ptr,
472+
&Header, Size);
472473
}
473474

474475
void *reallocate(void *OldPtr, uptr NewSize, uptr Alignment = MinAlignment) {
@@ -567,7 +568,8 @@ class Allocator {
567568
void *NewPtr = allocate(NewSize, Chunk::Origin::Malloc, Alignment);
568569
if (LIKELY(NewPtr)) {
569570
memcpy(NewPtr, OldTaggedPtr, Min(NewSize, OldSize));
570-
quarantineOrDeallocateChunk(Options, OldTaggedPtr, &Header, OldSize);
571+
quarantineOrDeallocateChunk(Options, OldTaggedPtr,
572+
/*HeaderTaggedPtr=*/OldPtr, &Header, OldSize);
571573
}
572574
return NewPtr;
573575
}
@@ -1199,9 +1201,10 @@ class Allocator {
11991201
}
12001202

12011203
void quarantineOrDeallocateChunk(const Options &Options, void *TaggedPtr,
1204+
void *HeaderTaggedPtr,
12021205
Chunk::UnpackedHeader *Header,
12031206
uptr Size) NO_THREAD_SAFETY_ANALYSIS {
1204-
void *Ptr = getHeaderTaggedPointer(TaggedPtr);
1207+
void *Ptr = HeaderTaggedPtr;
12051208
// If the quarantine is disabled, the actual size of a chunk is 0 or larger
12061209
// than the maximum allowed, we return a chunk directly to the backend.
12071210
// This purposefully underflows for Size == 0.
@@ -1212,31 +1215,23 @@ class Allocator {
12121215
Header->State = Chunk::State::Available;
12131216
else
12141217
Header->State = Chunk::State::Quarantined;
1215-
Header->OriginOrWasZeroed = useMemoryTagging<Config>(Options) &&
1216-
Header->ClassId &&
1217-
!TSDRegistry.getDisableMemInit();
1218-
Chunk::storeHeader(Cookie, Ptr, Header);
12191218

1219+
void *BlockBegin;
12201220
if (UNLIKELY(useMemoryTagging<Config>(Options))) {
1221-
u8 PrevTag = extractTag(reinterpret_cast<uptr>(TaggedPtr));
1222-
storeDeallocationStackMaybe(Options, Ptr, PrevTag, Size);
1223-
if (Header->ClassId) {
1224-
if (!TSDRegistry.getDisableMemInit()) {
1225-
uptr TaggedBegin, TaggedEnd;
1226-
const uptr OddEvenMask = computeOddEvenMaskForPointerMaybe(
1227-
Options, reinterpret_cast<uptr>(getBlockBegin(Ptr, Header)),
1228-
Header->ClassId);
1229-
// Exclude the previous tag so that immediate use after free is
1230-
// detected 100% of the time.
1231-
setRandomTag(Ptr, Size, OddEvenMask | (1UL << PrevTag), &TaggedBegin,
1232-
&TaggedEnd);
1233-
}
1234-
}
1221+
Header->OriginOrWasZeroed =
1222+
Header->ClassId && !TSDRegistry.getDisableMemInit();
1223+
BlockBegin = unTagBlock(Options, TaggedPtr, HeaderTaggedPtr, Header, Size,
1224+
BypassQuarantine);
1225+
} else {
1226+
Header->OriginOrWasZeroed = 0U;
1227+
if (BypassQuarantine && allocatorSupportsMemoryTagging<Config>())
1228+
Ptr = untagPointer(Ptr);
1229+
BlockBegin = getBlockBegin(Ptr, Header);
12351230
}
1231+
1232+
Chunk::storeHeader(Cookie, Ptr, Header);
1233+
12361234
if (BypassQuarantine) {
1237-
if (allocatorSupportsMemoryTagging<Config>())
1238-
Ptr = untagPointer(Ptr);
1239-
void *BlockBegin = getBlockBegin(Ptr, Header);
12401235
const uptr ClassId = Header->ClassId;
12411236
if (LIKELY(ClassId)) {
12421237
bool CacheDrained;
@@ -1251,9 +1246,6 @@ class Allocator {
12511246
if (CacheDrained)
12521247
Primary.tryReleaseToOS(ClassId, ReleaseToOS::Normal);
12531248
} else {
1254-
if (UNLIKELY(useMemoryTagging<Config>(Options)))
1255-
storeTags(reinterpret_cast<uptr>(BlockBegin),
1256-
reinterpret_cast<uptr>(Ptr));
12571249
Secondary.deallocate(Options, BlockBegin);
12581250
}
12591251
} else {
@@ -1263,6 +1255,38 @@ class Allocator {
12631255
}
12641256
}
12651257

1258+
NOINLINE void *unTagBlock(const Options &Options, void *TaggedPtr,
1259+
void *HeaderTaggedPtr,
1260+
Chunk::UnpackedHeader *Header, const uptr Size,
1261+
bool BypassQuarantine) {
1262+
DCHECK(useMemoryTagging<Config>(Options));
1263+
void *Ptr = HeaderTaggedPtr;
1264+
1265+
const u8 PrevTag = extractTag(reinterpret_cast<uptr>(TaggedPtr));
1266+
storeDeallocationStackMaybe(Options, Ptr, PrevTag, Size);
1267+
if (Header->ClassId) {
1268+
if (!TSDRegistry.getDisableMemInit()) {
1269+
uptr TaggedBegin, TaggedEnd;
1270+
const uptr OddEvenMask = computeOddEvenMaskForPointerMaybe(
1271+
Options, reinterpret_cast<uptr>(getBlockBegin(Ptr, Header)),
1272+
Header->ClassId);
1273+
// Exclude the previous tag so that immediate use after free is
1274+
// detected 100% of the time.
1275+
setRandomTag(Ptr, Size, OddEvenMask | (1UL << PrevTag), &TaggedBegin,
1276+
&TaggedEnd);
1277+
}
1278+
}
1279+
1280+
Ptr = untagPointer(Ptr);
1281+
void *BlockBegin = getBlockBegin(Ptr, Header);
1282+
if (BypassQuarantine && !Header->ClassId) {
1283+
storeTags(reinterpret_cast<uptr>(BlockBegin),
1284+
reinterpret_cast<uptr>(Ptr));
1285+
}
1286+
1287+
return BlockBegin;
1288+
}
1289+
12661290
bool getChunkFromBlock(uptr Block, uptr *Chunk,
12671291
Chunk::UnpackedHeader *Header) {
12681292
*Chunk =

0 commit comments

Comments (0)