@@ -468,7 +468,8 @@ class Allocator {
         reportDeleteSizeMismatch(Ptr, DeleteSize, Size);
     }
 
-    quarantineOrDeallocateChunk(Options, TaggedPtr, &Header, Size);
+    quarantineOrDeallocateChunk(Options, TaggedPtr, /*HeaderTaggedPtr=*/Ptr,
+                                &Header, Size);
   }
 
   void *reallocate(void *OldPtr, uptr NewSize, uptr Alignment = MinAlignment) {
@@ -567,7 +568,8 @@ class Allocator {
     void *NewPtr = allocate(NewSize, Chunk::Origin::Malloc, Alignment);
     if (LIKELY(NewPtr)) {
       memcpy(NewPtr, OldTaggedPtr, Min(NewSize, OldSize));
-      quarantineOrDeallocateChunk(Options, OldTaggedPtr, &Header, OldSize);
+      quarantineOrDeallocateChunk(Options, OldTaggedPtr,
+                                  /*HeaderTaggedPtr=*/OldPtr, &Header, OldSize);
     }
     return NewPtr;
   }
@@ -1199,9 +1201,10 @@ class Allocator {
   }
 
   void quarantineOrDeallocateChunk(const Options &Options, void *TaggedPtr,
+                                   void *HeaderTaggedPtr,
                                    Chunk::UnpackedHeader *Header,
                                    uptr Size) NO_THREAD_SAFETY_ANALYSIS {
-    void *Ptr = getHeaderTaggedPointer(TaggedPtr);
+    void *Ptr = HeaderTaggedPtr;
     // If the quarantine is disabled, the actual size of a chunk is 0 or larger
     // than the maximum allowed, we return a chunk directly to the backend.
     // This purposefully underflows for Size == 0.
@@ -1212,31 +1215,23 @@ class Allocator {
       Header->State = Chunk::State::Available;
     else
       Header->State = Chunk::State::Quarantined;
-    Header->OriginOrWasZeroed = useMemoryTagging<Config>(Options) &&
-                                Header->ClassId &&
-                                !TSDRegistry.getDisableMemInit();
-    Chunk::storeHeader(Cookie, Ptr, Header);
 
+    void *BlockBegin;
     if (UNLIKELY(useMemoryTagging<Config>(Options))) {
-      u8 PrevTag = extractTag(reinterpret_cast<uptr>(TaggedPtr));
-      storeDeallocationStackMaybe(Options, Ptr, PrevTag, Size);
-      if (Header->ClassId) {
-        if (!TSDRegistry.getDisableMemInit()) {
-          uptr TaggedBegin, TaggedEnd;
-          const uptr OddEvenMask = computeOddEvenMaskForPointerMaybe(
-              Options, reinterpret_cast<uptr>(getBlockBegin(Ptr, Header)),
-              Header->ClassId);
-          // Exclude the previous tag so that immediate use after free is
-          // detected 100% of the time.
-          setRandomTag(Ptr, Size, OddEvenMask | (1UL << PrevTag), &TaggedBegin,
-                       &TaggedEnd);
-        }
-      }
+      Header->OriginOrWasZeroed =
+          Header->ClassId && !TSDRegistry.getDisableMemInit();
+      BlockBegin = unTagBlock(Options, TaggedPtr, HeaderTaggedPtr, Header, Size,
+                              BypassQuarantine);
+    } else {
+      Header->OriginOrWasZeroed = 0U;
+      if (BypassQuarantine && allocatorSupportsMemoryTagging<Config>())
+        Ptr = untagPointer(Ptr);
+      BlockBegin = getBlockBegin(Ptr, Header);
     }
+
+    Chunk::storeHeader(Cookie, Ptr, Header);
+
     if (BypassQuarantine) {
-      if (allocatorSupportsMemoryTagging<Config>())
-        Ptr = untagPointer(Ptr);
-      void *BlockBegin = getBlockBegin(Ptr, Header);
       const uptr ClassId = Header->ClassId;
       if (LIKELY(ClassId)) {
         bool CacheDrained;
@@ -1251,9 +1246,6 @@ class Allocator {
         if (CacheDrained)
           Primary.tryReleaseToOS(ClassId, ReleaseToOS::Normal);
       } else {
-        if (UNLIKELY(useMemoryTagging<Config>(Options)))
-          storeTags(reinterpret_cast<uptr>(BlockBegin),
-                    reinterpret_cast<uptr>(Ptr));
         Secondary.deallocate(Options, BlockBegin);
       }
     } else {
@@ -1263,6 +1255,38 @@ class Allocator {
     }
   }
 
+  NOINLINE void *unTagBlock(const Options &Options, void *TaggedPtr,
+                            void *HeaderTaggedPtr,
+                            Chunk::UnpackedHeader *Header, const uptr Size,
+                            bool BypassQuarantine) {
+    DCHECK(useMemoryTagging<Config>(Options));
+    void *Ptr = HeaderTaggedPtr;
+
+    const u8 PrevTag = extractTag(reinterpret_cast<uptr>(TaggedPtr));
+    storeDeallocationStackMaybe(Options, Ptr, PrevTag, Size);
+    if (Header->ClassId) {
+      if (!TSDRegistry.getDisableMemInit()) {
+        uptr TaggedBegin, TaggedEnd;
+        const uptr OddEvenMask = computeOddEvenMaskForPointerMaybe(
+            Options, reinterpret_cast<uptr>(getBlockBegin(Ptr, Header)),
+            Header->ClassId);
+        // Exclude the previous tag so that immediate use after free is
+        // detected 100% of the time.
+        setRandomTag(Ptr, Size, OddEvenMask | (1UL << PrevTag), &TaggedBegin,
+                     &TaggedEnd);
+      }
+    }
+
+    Ptr = untagPointer(Ptr);
+    void *BlockBegin = getBlockBegin(Ptr, Header);
+    if (BypassQuarantine && !Header->ClassId) {
+      storeTags(reinterpret_cast<uptr>(BlockBegin),
+                reinterpret_cast<uptr>(Ptr));
+    }
+
+    return BlockBegin;
+  }
+
   bool getChunkFromBlock(uptr Block, uptr *Chunk,
                          Chunk::UnpackedHeader *Header) {
     *Chunk =
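The mask passed to setRandomTag in the hunk above, OddEvenMask | (1UL << PrevTag), is what makes an immediate use-after-free through the stale pointer always fault: the freed block is retagged with a tag that can never equal the one it carried while allocated. The following is a minimal standalone sketch of that exclusion idea only; pickTag, kTagBits, and the RNG setup are illustrative inventions, not Scudo's setRandomTag implementation.

// Illustrative sketch (not Scudo code): excluding the previous memory tag
// from the candidate set guarantees the freed block is retagged differently,
// so a dereference through the old tagged pointer can never match.
#include <cassert>
#include <cstdint>
#include <random>

constexpr unsigned kTagBits = 4;            // MTE uses 4-bit tags (16 values)
constexpr unsigned kNumTags = 1u << kTagBits;

// Pick a random tag whose bit is NOT set in ExcludeMask.
uint8_t pickTag(uint16_t ExcludeMask, std::mt19937 &Rng) {
  assert(ExcludeMask != 0xffff && "at least one tag must remain usable");
  std::uniform_int_distribution<unsigned> Dist(0, kNumTags - 1);
  uint8_t Tag;
  do {
    Tag = static_cast<uint8_t>(Dist(Rng));
  } while (ExcludeMask & (1u << Tag));
  return Tag;
}

int main() {
  std::mt19937 Rng(0);
  const uint8_t PrevTag = 7;                // tag the block had while allocated
  // Excluding PrevTag mirrors the (1UL << PrevTag) bit OR'd into the mask.
  const uint16_t ExcludeMask = 1u << PrevTag;
  for (int I = 0; I < 1000; ++I)
    assert(pickTag(ExcludeMask, Rng) != PrevTag); // old tag is never reused
  return 0;
}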