@@ -3277,12 +3277,13 @@ static bool isRemovableWrite(CallBase &CB, Value *UsedV,
   return Dest && Dest->Ptr == UsedV;
 }

-static bool isAllocSiteRemovable(Instruction *AI,
-                                 SmallVectorImpl<WeakTrackingVH> &Users,
-                                 const TargetLibraryInfo &TLI) {
+static std::optional<ModRefInfo>
+isAllocSiteRemovable(Instruction *AI, SmallVectorImpl<WeakTrackingVH> &Users,
+                     const TargetLibraryInfo &TLI, bool KnowInit) {
   SmallVector<Instruction*, 4> Worklist;
   const std::optional<StringRef> Family = getAllocationFamily(AI, &TLI);
   Worklist.push_back(AI);
+  ModRefInfo Access = KnowInit ? ModRefInfo::NoModRef : ModRefInfo::Mod;

   do {
     Instruction *PI = Worklist.pop_back_val();
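This hunk changes the contract: instead of a bare bool, the walk now reports how the allocation is accessed. Access starts at NoModRef only when the caller knows the initial contents (KnowInit); otherwise it starts at Mod, so any later read of possibly-uninitialized memory is rejected. A minimal sketch of the ModRefInfo bit semantics this relies on (editor's illustration only, assuming the helpers from llvm/Support/ModRef.h; not part of the patch):

    #include <cassert>
    #include "llvm/Support/ModRef.h"
    using namespace llvm;

    void modRefBitsSketch() {
      ModRefInfo Access = ModRefInfo::NoModRef; // initial contents known
      Access |= ModRefInfo::Ref;                // saw a load of the allocation
      assert(isRefSet(Access) && !isModSet(Access));
      Access |= ModRefInfo::Mod;                // a write too: both bits set
      assert(Access == ModRefInfo::ModRef);     // the walk never returns this
    }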
@@ -3291,7 +3292,7 @@ static bool isAllocSiteRemovable(Instruction *AI,
       switch (I->getOpcode()) {
       default:
         // Give up the moment we see something we can't handle.
-        return false;
+        return std::nullopt;

       case Instruction::AddrSpaceCast:
       case Instruction::BitCast:
@@ -3306,10 +3307,10 @@ static bool isAllocSiteRemovable(Instruction *AI,
         // We also fold comparisons in some conditions provided the alloc has
         // not escaped (see isNeverEqualToUnescapedAlloc).
         if (!ICI->isEquality())
-          return false;
+          return std::nullopt;
         unsigned OtherIndex = (ICI->getOperand(0) == PI) ? 1 : 0;
         if (!isNeverEqualToUnescapedAlloc(ICI->getOperand(OtherIndex), TLI, AI))
-          return false;
+          return std::nullopt;

         // Do not fold compares to aligned_alloc calls, as they may have to
         // return null in case the required alignment cannot be satisfied,
@@ -3329,7 +3330,7 @@ static bool isAllocSiteRemovable(Instruction *AI,
         if (CB && TLI.getLibFunc(*CB->getCalledFunction(), TheLibFunc) &&
             TLI.has(TheLibFunc) && TheLibFunc == LibFunc_aligned_alloc &&
             !AlignmentAndSizeKnownValid(CB))
-          return false;
+          return std::nullopt;
         Users.emplace_back(I);
         continue;
       }
@@ -3339,14 +3340,21 @@ static bool isAllocSiteRemovable(Instruction *AI,
         if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
           switch (II->getIntrinsicID()) {
           default:
-            return false;
+            return std::nullopt;

           case Intrinsic::memmove:
           case Intrinsic::memcpy:
           case Intrinsic::memset: {
             MemIntrinsic *MI = cast<MemIntrinsic>(II);
-            if (MI->isVolatile() || MI->getRawDest() != PI)
-              return false;
+            if (MI->isVolatile())
+              return std::nullopt;
+            // Note: this could also be ModRef, but we can still interpret that
+            // as just Mod in that case.
+            ModRefInfo NewAccess =
+                MI->getRawDest() == PI ? ModRefInfo::Mod : ModRefInfo::Ref;
+            if ((Access & ~NewAccess) != ModRefInfo::NoModRef)
+              return std::nullopt;
+            Access |= NewAccess;
             [[fallthrough]];
           }
           case Intrinsic::assume:
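The guard `(Access & ~NewAccess) != ModRefInfo::NoModRef` bails out whenever the new access adds the opposite kind to one already seen: a transfer out of the allocation (Ref) after a write or after unknown initial contents (which seeded Mod), or a write after a read. A worked instance of the mask (editor's illustration only, same ModRef.h assumptions as above):

    #include <cassert>
    #include "llvm/Support/ModRef.h"
    using namespace llvm;

    void maskCheckSketch() {
      // Unknown init seeds Access with Mod; a memcpy *from* PI is Ref.
      ModRefInfo Access = ModRefInfo::Mod, NewAccess = ModRefInfo::Ref;
      assert((Access & ~NewAccess) != ModRefInfo::NoModRef); // give up
      // Known-init allocation: the same read is fine.
      Access = ModRefInfo::NoModRef;
      assert((Access & ~NewAccess) == ModRefInfo::NoModRef); // Access |= Ref
    }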
@@ -3365,11 +3373,6 @@ static bool isAllocSiteRemovable(Instruction *AI,
           }
         }

-        if (isRemovableWrite(*cast<CallBase>(I), PI, TLI)) {
-          Users.emplace_back(I);
-          continue;
-        }
-
         if (Family && getFreedOperand(cast<CallBase>(I), &TLI) == PI &&
             getAllocationFamily(I, &TLI) == Family) {
           Users.emplace_back(I);
@@ -3383,20 +3386,43 @@ static bool isAllocSiteRemovable(Instruction *AI,
           continue;
         }

-        return false;
+        if (!isRefSet(Access) &&
+            isRemovableWrite(*cast<CallBase>(I), PI, TLI)) {
+          Access |= ModRefInfo::Mod;
+          Users.emplace_back(I);
+          continue;
+        }
+
+        return std::nullopt;

       case Instruction::Store: {
         StoreInst *SI = cast<StoreInst>(I);
         if (SI->isVolatile() || SI->getPointerOperand() != PI)
-          return false;
+          return std::nullopt;
+        if (isRefSet(Access))
+          return std::nullopt;
+        Access |= ModRefInfo::Mod;
+        Users.emplace_back(I);
+        continue;
+      }
+
+      case Instruction::Load: {
+        LoadInst *LI = cast<LoadInst>(I);
+        if (LI->isVolatile() || LI->getPointerOperand() != PI)
+          return std::nullopt;
+        if (isModSet(Access))
+          return std::nullopt;
+        Access |= ModRefInfo::Ref;
         Users.emplace_back(I);
         continue;
       }
       }
       llvm_unreachable("missing a return?");
     }
   } while (!Worklist.empty());
-  return true;
+
+  assert(Access != ModRefInfo::ModRef);
+  return Access;
 }

 Instruction *InstCombinerImpl::visitAllocSite(Instruction &MI) {
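Together with the new Load case, the walk can now remove an allocation that is only read, provided its initial contents are known. For instance (hypothetical C++ source, not from the patch), a load from a fresh calloc folds to zero and the whole calloc/free pair disappears:

    #include <cstdlib>

    int firstByteOfCalloc() {
      char *p = static_cast<char *>(calloc(16, 1)); // KnowInitZero
      int v = p[0]; // only a Ref of the allocation
      free(p);
      return v;     // InstCombine can reduce this to: return 0;
    }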
@@ -3424,10 +3450,31 @@ Instruction *InstCombinerImpl::visitAllocSite(Instruction &MI) {
     DIB.reset(new DIBuilder(*MI.getModule(), /*AllowUnresolved=*/false));
   }

-  if (isAllocSiteRemovable(&MI, Users, TLI)) {
+  // Determine what getInitialValueOfAllocation would return without actually
+  // allocating the result.
+  bool KnowInitUndef = false;
+  bool KnowInitZero = false;
+  Constant *Init =
+      getInitialValueOfAllocation(&MI, &TLI, Type::getInt8Ty(MI.getContext()));
+  if (Init) {
+    if (isa<UndefValue>(Init))
+      KnowInitUndef = true;
+    else if (Init->isNullValue())
+      KnowInitZero = true;
+  }
+  // The various sanitizers don't actually return undef memory, but rather
+  // memory initialized with special forms of runtime poison
+  auto &F = *MI.getFunction();
+  if (F.hasFnAttribute(Attribute::SanitizeMemory) ||
+      F.hasFnAttribute(Attribute::SanitizeAddress))
+    KnowInitUndef = false;
+
+  auto Removable =
+      isAllocSiteRemovable(&MI, Users, TLI, KnowInitZero | KnowInitUndef);
+  if (Removable) {
     for (unsigned i = 0, e = Users.size(); i != e; ++i) {
-      // Lowering all @llvm.objectsize calls first because they may
-      // use a bitcast/GEP of the alloca we are removing.
+      // Lowering all @llvm.objectsize and MTI calls first because they may use
+      // a bitcast/GEP of the alloca we are removing.
       if (!Users[i])
         continue;

@@ -3444,6 +3491,17 @@ Instruction *InstCombinerImpl::visitAllocSite(Instruction &MI) {
           eraseInstFromFunction(*I);
           Users[i] = nullptr; // Skip examining in the next loop.
         }
+        if (auto *MTI = dyn_cast<MemTransferInst>(I)) {
+          if (KnowInitZero && isRefSet(*Removable)) {
+            IRBuilderBase::InsertPointGuard Guard(Builder);
+            Builder.SetInsertPoint(MTI);
+            auto *M = Builder.CreateMemSet(
+                MTI->getRawDest(),
+                ConstantInt::get(Type::getInt8Ty(MI.getContext()), 0),
+                MTI->getLength(), MTI->getDestAlign());
+            M->copyMetadata(*MTI);
+          }
+        }
       }
     }
     for (unsigned i = 0, e = Users.size(); i != e; ++i) {
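This hunk handles the one reader that cannot simply be erased: a memcpy/memmove out of the allocation still has a visible effect on its destination. When the allocation is known zero-initialized, the transfer is rewritten into an equivalent memset of the destination before the allocation itself is deleted. Roughly (hypothetical C++ source, not from the patch):

    #include <cstdlib>
    #include <cstring>

    void copyFreshCalloc(char *dst, size_t n) {
      char *p = static_cast<char *>(calloc(n, 1));
      memcpy(dst, p, n); // becomes: memset(dst, 0, n)
      free(p);           // calloc/free pair is removed
    }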
@@ -3466,7 +3524,14 @@ Instruction *InstCombinerImpl::visitAllocSite(Instruction &MI) {
       } else {
         // Casts, GEP, or anything else: we're about to delete this instruction,
         // so it can not have any valid uses.
-        replaceInstUsesWith(*I, PoisonValue::get(I->getType()));
+        Constant *Replace;
+        if (isa<LoadInst>(I)) {
+          assert(KnowInitZero || KnowInitUndef);
+          Replace = KnowInitUndef ? UndefValue::get(I->getType())
+                                  : Constant::getNullValue(I->getType());
+        } else
+          Replace = PoisonValue::get(I->getType());
+        replaceInstUsesWith(*I, Replace);
       }
       eraseInstFromFunction(*I);
     }
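Finally, loads of the removed allocation are replaced with the known initial value: zero for zero-initializing allocators, undef otherwise, while every other leftover user still becomes poison. Note the interaction with the sanitizer check earlier: under MSan/ASan, KnowInitUndef is forced off, so a read of uninitialized malloc memory should keep the allocation alive rather than fold to undef, preserving the runtime report. A sketch of the consequence (hypothetical source, not from the patch):

    #include <cstdlib>

    int readUninitialized() {
      int *p = static_cast<int *>(malloc(sizeof(int)));
      int v = *p; // without sanitizers: folds to undef, malloc removed;
                  // with -fsanitize=memory: left intact for MSan to flag
      free(p);
      return v;
    }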