@@ -179,6 +179,8 @@ const char kAMDGPUAddressPrivateName[] = "llvm.amdgcn.is.private";
 const char kAMDGPUBallotName[] = "llvm.amdgcn.ballot.i64";
 const char kAMDGPUUnreachableName[] = "llvm.amdgcn.unreachable";
 
+const char kAsanMemToShadow[] = "__asan_mem_to_shadow";
+
 // Accesses sizes are powers of two: 1, 2, 4, 8, 16.
 static const size_t kNumberOfAccessSizes = 5;
 
@@ -447,7 +449,7 @@ static cl::opt<AsanDtorKind> ClOverrideDestructorKind(
 static cl::opt<bool>
     ClSpirOffloadPrivates("asan-spir-privates",
                           cl::desc("instrument private pointer"), cl::Hidden,
-                          cl::init(false));
+                          cl::init(true));
 
 static cl::opt<bool> ClSpirOffloadGlobals("asan-spir-globals",
                                           cl::desc("instrument global pointer"),
@@ -820,14 +822,15 @@ struct AddressSanitizer {
                                    Value *SizeArgument, uint32_t Exp,
                                    RuntimeCallInserter &RTCI);
   void instrumentMemIntrinsic(MemIntrinsic *MI, RuntimeCallInserter &RTCI);
-  Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);
+  Value *memToShadow(Value *Shadow, IRBuilder<> &IRB,
+                     uint32_t AddressSpace = kSpirOffloadPrivateAS);
   bool suppressInstrumentationSiteForDebug(int &Instrumented);
   bool instrumentFunction(Function &F, const TargetLibraryInfo *TLI);
   bool maybeInsertAsanInitAtFunctionEntry(Function &F);
   bool maybeInsertDynamicShadowAtFunctionEntry(Function &F);
   void markEscapedLocalAllocas(Function &F);
   void instrumentSyclStaticLocalMemory(CallInst *CI);
-  void instrumentSyclDynamicLocalMemory(Function &F);
+  bool instrumentSyclDynamicLocalMemory(Function &F);
 
   GlobalVariable *GetOrCreateGlobalString(Module &M, StringRef Name,
                                           StringRef Value,
@@ -899,6 +902,8 @@ struct AddressSanitizer {
   FunctionCallee AMDGPUAddressPrivate;
   int InstrumentationWithCallsThreshold;
   uint32_t MaxInlinePoisoningSize;
+
+  FunctionCallee AsanMemToShadow;
 };
 
 class ModuleAddressSanitizer {
@@ -1067,7 +1072,7 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
         DIB(*F.getParent(), /*AllowUnresolved*/ false), C(ASan.C),
         IntptrTy(ASan.IntptrTy), IntptrPtrTy(PointerType::get(IntptrTy, 0)),
         Mapping(ASan.Mapping),
-        PoisonStack(ClStack &&
+        PoisonStack((ClStack || ClSpirOffloadPrivates) &&
                     !Triple(F.getParent()->getTargetTriple()).isAMDGPU()) {}
 
   bool runOnFunction() {
@@ -1350,7 +1355,7 @@ static void ExtendSpirKernelArgs(Module &M, FunctionAnalysisManager &FAM) {
   }
 
   // Fixup all users
-  for (auto [F, NewF] : SpirFuncs) {
+  for (auto &[F, NewF] : SpirFuncs) {
     SmallVector<User *, 16> Users(F->users());
     for (User *U : Users) {
       if (auto *CI = dyn_cast<CallInst>(U)) {
@@ -1544,13 +1549,13 @@ void AddressSanitizer::AppendDebugInfoToArgs(Instruction *InsertBefore,
     Args.push_back(ConstantExpr::getPointerCast(FuncNameGV, ConstASPtrTy));
   }
 
-Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB) {
+Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB,
+                                     uint32_t AddressSpace) {
   if (TargetTriple.isSPIR()) {
-    // ((Shadow & 0xffffffff) >> 3) + __AsanShadowMemoryPrivateStart;
-    Shadow = IRB.CreateAnd(Shadow, ConstantInt::get(IntptrTy, 0xffffffff));
-    Shadow = IRB.CreateLShr(Shadow, Mapping.Scale);
-    Value *ShadowBase = IRB.CreateLoad(IntptrTy, AsanShadowDevicePrivate);
-    return IRB.CreateAdd(Shadow, ShadowBase);
+    return IRB.CreateCall(
+        AsanMemToShadow,
+        {Shadow, ConstantInt::get(IRB.getInt32Ty(), AddressSpace)},
+        "shadow_ptr");
   }
   // Shadow >> scale
   Shadow = IRB.CreateLShr(Shadow, Mapping.Scale);
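Note: with this hunk the SPIR path no longer inlines the address-to-shadow computation; it emits a call to the __asan_mem_to_shadow callback and lets the device sanitizer runtime do the mapping. Below is a minimal sketch of what such a runtime hook might look like, reconstructed from the inline sequence deleted above. The function body, the shadow scale value, and the reuse of __AsanShadowMemoryPrivateStart are assumptions for illustration only, not the actual libdevice implementation; only the signature (uptr return, uptr address plus a 32-bit address-space tag) follows from the getOrInsertFunction call added later in this patch.

#include <cstdint>

using uptr = std::uintptr_t;

// Assumed runtime-provided shadow base; the removed inline code loaded this
// from the global __AsanShadowMemoryPrivateStart.
extern uptr __AsanShadowMemoryPrivateStart;

// Hypothetical device-side counterpart of the "__asan_mem_to_shadow" callback.
// Only the private address space is sketched, mirroring the removed sequence
// ((Shadow & 0xffffffff) >> Scale) + __AsanShadowMemoryPrivateStart.
extern "C" uptr __asan_mem_to_shadow(uptr addr, std::uint32_t address_space) {
  constexpr uptr kShadowScale = 3;  // 8 application bytes per shadow byte
  (void)address_space;              // a real implementation dispatches per AS
  return ((addr & 0xffffffffULL) >> kShadowScale) +
         __AsanShadowMemoryPrivateStart;
}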
@@ -1619,7 +1624,7 @@ void AddressSanitizer::instrumentSyclStaticLocalMemory(CallInst *CI) {
 }
 
 // Instument dynamic local memory
-void AddressSanitizer::instrumentSyclDynamicLocalMemory(Function &F) {
+bool AddressSanitizer::instrumentSyclDynamicLocalMemory(Function &F) {
   InstrumentationIRBuilder IRB(F.getEntryBlock().getFirstNonPHI());
 
   // Save "__asan_launch" into local memory "__AsanLaunchInfo"
@@ -1631,13 +1636,12 @@ void AddressSanitizer::instrumentSyclDynamicLocalMemory(Function &F) {
   SmallVector<Argument *> LocalArgs;
   for (auto &Arg : F.args()) {
     Type *PtrTy = dyn_cast<PointerType>(Arg.getType()->getScalarType());
-    // Local address space
-    if (PtrTy && PtrTy->getPointerAddressSpace() == 3)
+    if (PtrTy && PtrTy->getPointerAddressSpace() == kSpirOffloadLocalAS)
       LocalArgs.push_back(&Arg);
   }
 
   if (LocalArgs.empty())
-    return;
+    return false;
 
   AllocaInst *ArgsArray = IRB.CreateAlloca(
       IntptrTy, ConstantInt::get(Int32Ty, LocalArgs.size()), "local_args");
@@ -1649,6 +1653,7 @@ void AddressSanitizer::instrumentSyclDynamicLocalMemory(Function &F) {
   IRB.CreateCall(AsanSetShadowDynamicLocalFunc,
                  {IRB.CreatePointerCast(ArgsArray, IntptrTy),
                   ConstantInt::get(Int32Ty, LocalArgs.size())});
+  return true;
 }
 
 // Instrument memset/memmove/memcpy
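For context, the dynamic-local instrumentation above packs the kernel's local-address-space pointer arguments into an on-stack uptr array and hands its address and element count to the runtime via AsanSetShadowDynamicLocalFunc. The sketch below shows a plausible shape for that entry point, inferred only from the call-site operand types (an IntptrTy array address plus an i32 count) and from the handle's name; the symbol name and exact prototype are assumptions, not the documented runtime interface.

#include <cstdint>

using uptr = std::uintptr_t;

// Assumed prototype for the callee behind AsanSetShadowDynamicLocalFunc; the
// real declaration lives in the SYCL device sanitizer runtime and may differ.
extern "C" void __asan_set_shadow_dynamic_local(uptr args_array_begin,
                                                std::uint32_t num_args);

// Illustrative caller mirroring the instrumented kernel entry: it passes the
// address of a uptr array holding each local pointer argument plus the count.
void example_poison_dynamic_locals(uptr *local_arg_addrs, std::uint32_t n) {
  __asan_set_shadow_dynamic_local(reinterpret_cast<uptr>(local_arg_addrs), n);
}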
@@ -3232,14 +3237,6 @@ void AddressSanitizer::initializeCallbacks(Module &M, const TargetLibraryInfo *T
                              ArrayType::get(IRB.getInt8Ty(), 0));
 
   if (TargetTriple.isSPIR()) {
-    AsanShadowDevicePrivate =
-        M.getOrInsertGlobal("__AsanShadowMemoryPrivateStart", IntptrTy, [&] {
-          return new GlobalVariable(M, IntptrTy, true,
-                                    GlobalVariable::ExternalLinkage, nullptr,
-                                    "__AsanShadowMemoryPrivateStart", nullptr,
-                                    GlobalVariable::NotThreadLocal, 1);
-        });
-
     // __asan_set_shadow_static_local(
     //   uptr ptr,
     //   size_t size,
@@ -3263,6 +3260,9 @@ void AddressSanitizer::initializeCallbacks(Module &M, const TargetLibraryInfo *T
               GlobalVariable::ExternalLinkage, nullptr, "__AsanLaunchInfo",
               nullptr, GlobalVariable::NotThreadLocal, kSpirOffloadLocalAS);
         });
+
+    AsanMemToShadow = M.getOrInsertFunction(kAsanMemToShadow, IntptrTy,
+                                            IntptrTy, Type::getInt32Ty(*C));
   }
 
   AMDGPUAddressShared =
@@ -3391,10 +3391,6 @@ bool AddressSanitizer::instrumentFunction(Function &F,
   // can be passed to that intrinsic.
   markEscapedLocalAllocas(F);
 
-  if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
-    instrumentSyclDynamicLocalMemory(F);
-  }
-
   // We want to instrument every address only once per basic block (unless there
   // are calls between uses).
   SmallPtrSet<Value *, 16> TempsToInstrument;
@@ -3514,6 +3510,11 @@ bool AddressSanitizer::instrumentFunction(Function &F,
   if (ChangedStack || !NoReturnCalls.empty())
     FunctionModified = true;
 
+  // We need to instrument dynamic local arguments after stack poisoner
+  if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
+    FunctionModified |= instrumentSyclDynamicLocalMemory(F);
+  }
+
   LLVM_DEBUG(dbgs() << "ASAN done instrumenting: " << FunctionModified << " "
                     << F << "\n");
 
@@ -3999,32 +4000,39 @@ void FunctionStackPoisoner::processStaticAllocas() {
     AI->replaceAllUsesWith(NewAllocaPtr);
   }
 
+  auto TargetTriple = Triple(F.getParent()->getTargetTriple());
+
   // The left-most redzone has enough space for at least 4 pointers.
-  // Write the Magic value to redzone[0].
   Value *BasePlus0 = IRB.CreateIntToPtr(LocalStackBase, IntptrPtrTy);
-  IRB.CreateStore(ConstantInt::get(IntptrTy, kCurrentStackFrameMagic),
-                  BasePlus0);
-  // Write the frame description constant to redzone[1].
-  Value *BasePlus1 = IRB.CreateIntToPtr(
-      IRB.CreateAdd(LocalStackBase,
-                    ConstantInt::get(IntptrTy, ASan.LongSize / 8)),
-      IntptrPtrTy);
-  GlobalVariable *StackDescriptionGlobal =
-      createPrivateGlobalForString(*F.getParent(), DescriptionString,
-                                   /*AllowMerging*/ true, kAsanGenPrefix);
-  Value *Description = IRB.CreatePointerCast(StackDescriptionGlobal, IntptrTy);
-  IRB.CreateStore(Description, BasePlus1);
-  // Write the PC to redzone[2].
-  Value *BasePlus2 = IRB.CreateIntToPtr(
-      IRB.CreateAdd(LocalStackBase,
-                    ConstantInt::get(IntptrTy, 2 * ASan.LongSize / 8)),
-      IntptrPtrTy);
-  IRB.CreateStore(IRB.CreatePointerCast(&F, IntptrTy), BasePlus2);
+  // SPIRV doesn't use the following metadata
+  if (!TargetTriple.isSPIR()) {
+    // Write the Magic value to redzone[0].
+    IRB.CreateStore(ConstantInt::get(IntptrTy, kCurrentStackFrameMagic),
+                    BasePlus0);
+    // Write the frame description constant to redzone[1].
+    Value *BasePlus1 = IRB.CreateIntToPtr(
+        IRB.CreateAdd(LocalStackBase,
+                      ConstantInt::get(IntptrTy, ASan.LongSize / 8)),
+        IntptrPtrTy);
+    GlobalVariable *StackDescriptionGlobal =
+        createPrivateGlobalForString(*F.getParent(), DescriptionString,
+                                     /*AllowMerging*/ true, kAsanGenPrefix);
+    Value *Description =
+        IRB.CreatePointerCast(StackDescriptionGlobal, IntptrTy);
+    IRB.CreateStore(Description, BasePlus1);
+    // Write the PC to redzone[2].
+    Value *BasePlus2 = IRB.CreateIntToPtr(
+        IRB.CreateAdd(LocalStackBase,
+                      ConstantInt::get(IntptrTy, 2 * ASan.LongSize / 8)),
+        IntptrPtrTy);
+    IRB.CreateStore(IRB.CreatePointerCast(&F, IntptrTy), BasePlus2);
+  }
 
   const auto &ShadowAfterScope = GetShadowBytesAfterScope(SVD, L);
 
   // Poison the stack red zones at the entry.
-  Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
+  Value *ShadowBase =
+      ASan.memToShadow(LocalStackBase, IRB, kSpirOffloadPrivateAS);
   // As mask we must use most poisoned case: red zones and after scope.
   // As bytes we can use either the same or just red zones only.
   copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);