@@ -572,6 +572,9 @@ class MemorySanitizer {
   /// uninitialized value and returns an updated origin id encoding this info.
   FunctionCallee MsanChainOriginFn;
 
+  /// Run-time helper that paints an origin over a region.
+  FunctionCallee MsanSetOriginFn;
+
   /// MSan runtime replacements for memmove, memcpy and memset.
   FunctionCallee MemmoveFn, MemcpyFn, MemsetFn;
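
Note on the new member: it binds to the __msan_set_origin entry point in the MSan runtime. As a minimal sketch, here is the C-level prototype it corresponds to, inferred from the getOrInsertFunction binding in the initializeCallbacks hunk below rather than copied from compiler-rt:

    #include <cstdint>
    // Sketch only: void return, pointer, pointer-sized size, 32-bit origin id.
    // The authoritative declaration lives in compiler-rt's MSan interface.
    extern "C" void __msan_set_origin(void *addr, uintptr_t size,
                                      uint32_t origin);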
@@ -850,6 +853,9 @@ void MemorySanitizer::initializeCallbacks(Module &M) {
   // instrumentation.
   MsanChainOriginFn = M.getOrInsertFunction(
       "__msan_chain_origin", IRB.getInt32Ty(), IRB.getInt32Ty());
+  MsanSetOriginFn =
+      M.getOrInsertFunction("__msan_set_origin", IRB.getVoidTy(),
+                            IRB.getInt8PtrTy(), IntptrTy, IRB.getInt32Ty());
  MemmoveFn = M.getOrInsertFunction(
       "__msan_memmove", IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
       IRB.getInt8PtrTy(), IntptrTy);
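
In the getOrInsertFunction pattern above, the first type is the return type and the rest are parameter types, so on a 64-bit target this binding declares void @__msan_set_origin(i8*, i64, i32) at the IR level. A hypothetical standalone use of the same pattern, where Ctx and M stand in for an LLVMContext and Module from surrounding code and the fixed i64 is an assumption in place of IntptrTy:

    FunctionCallee SetOrigin = M.getOrInsertFunction(
        "__msan_set_origin", Type::getVoidTy(Ctx), Type::getInt8PtrTy(Ctx),
        Type::getInt64Ty(Ctx), Type::getInt32Ty(Ctx));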
@@ -1769,6 +1775,24 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
     llvm_unreachable("Unknown ordering");
   }
 
+  Value *makeAddReleaseOrderingTable(IRBuilder<> &IRB) {
+    constexpr int NumOrderings = (int)AtomicOrderingCABI::seq_cst + 1;
+    uint32_t OrderingTable[NumOrderings] = {};
+
+    OrderingTable[(int)AtomicOrderingCABI::relaxed] =
+        OrderingTable[(int)AtomicOrderingCABI::release] =
+            (int)AtomicOrderingCABI::release;
+    OrderingTable[(int)AtomicOrderingCABI::consume] =
+        OrderingTable[(int)AtomicOrderingCABI::acquire] =
+            OrderingTable[(int)AtomicOrderingCABI::acq_rel] =
+                (int)AtomicOrderingCABI::acq_rel;
+    OrderingTable[(int)AtomicOrderingCABI::seq_cst] =
+        (int)AtomicOrderingCABI::seq_cst;
+
+    return ConstantDataVector::get(IRB.getContext(),
+                                   makeArrayRef(OrderingTable, NumOrderings));
+  }
+
   AtomicOrdering addAcquireOrdering(AtomicOrdering a) {
     switch (a) {
     case AtomicOrdering::NotAtomic:
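
The table above is read back with a single extractelement, so the strengthen-to-release mapping costs one constant-vector lookup and no branches at run time. A standalone sketch of the mapping it encodes, assuming an enum that mirrors LLVM's AtomicOrderingCABI numbering (relaxed = 0 through seq_cst = 5):

    // Sketch, not part of the patch: anything weaker than release is raised
    // to release, acquire-flavored orderings to acq_rel, seq_cst is kept.
    enum class AtomicOrderingCABI : int {
      relaxed, consume, acquire, release, acq_rel, seq_cst
    };
    constexpr AtomicOrderingCABI addRelease(AtomicOrderingCABI O) {
      switch (O) {
      case AtomicOrderingCABI::relaxed:
      case AtomicOrderingCABI::release:
        return AtomicOrderingCABI::release;
      case AtomicOrderingCABI::consume:
      case AtomicOrderingCABI::acquire:
      case AtomicOrderingCABI::acq_rel:
        return AtomicOrderingCABI::acq_rel;
      case AtomicOrderingCABI::seq_cst:
        return AtomicOrderingCABI::seq_cst;
      }
      return O;
    }
    static_assert(addRelease(AtomicOrderingCABI::relaxed) ==
                      AtomicOrderingCABI::release,
                  "relaxed is strengthened to release");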
@@ -1786,6 +1810,24 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
     llvm_unreachable("Unknown ordering");
   }
 
+  Value *makeAddAcquireOrderingTable(IRBuilder<> &IRB) {
+    constexpr int NumOrderings = (int)AtomicOrderingCABI::seq_cst + 1;
+    uint32_t OrderingTable[NumOrderings] = {};
+
+    OrderingTable[(int)AtomicOrderingCABI::relaxed] =
+        OrderingTable[(int)AtomicOrderingCABI::acquire] =
+            OrderingTable[(int)AtomicOrderingCABI::consume] =
+                (int)AtomicOrderingCABI::acquire;
+    OrderingTable[(int)AtomicOrderingCABI::release] =
+        OrderingTable[(int)AtomicOrderingCABI::acq_rel] =
+            (int)AtomicOrderingCABI::acq_rel;
+    OrderingTable[(int)AtomicOrderingCABI::seq_cst] =
+        (int)AtomicOrderingCABI::seq_cst;
+
+    return ConstantDataVector::get(IRB.getContext(),
+                                   makeArrayRef(OrderingTable, NumOrderings));
+  }
+
   // ------------------- Visitors.
   using InstVisitor<MemorySanitizerVisitor>::visit;
   void visit(Instruction &I) {
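
The acquire table is the mirror image, with consume conservatively folded into acquire. Continuing the hypothetical enum from the sketch above:

    // Mirror sketch for the acquire direction (same assumed enum as before).
    constexpr AtomicOrderingCABI addAcquire(AtomicOrderingCABI O) {
      switch (O) {
      case AtomicOrderingCABI::relaxed:
      case AtomicOrderingCABI::consume:
      case AtomicOrderingCABI::acquire:
        return AtomicOrderingCABI::acquire;
      case AtomicOrderingCABI::release:
      case AtomicOrderingCABI::acq_rel:
        return AtomicOrderingCABI::acq_rel;
      case AtomicOrderingCABI::seq_cst:
        return AtomicOrderingCABI::seq_cst;
      }
      return O;
    }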
@@ -3404,6 +3446,60 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
     }
   }
 
+  void visitLibAtomicLoad(CallBase &CB) {
+    IRBuilder<> IRB(&CB);
+    Value *Size = CB.getArgOperand(0);
+    Value *SrcPtr = CB.getArgOperand(1);
+    Value *DstPtr = CB.getArgOperand(2);
+    Value *Ordering = CB.getArgOperand(3);
+    // Convert the call to have at least Acquire ordering to make sure
+    // the shadow operations aren't reordered before it.
+    Value *NewOrdering =
+        IRB.CreateExtractElement(makeAddAcquireOrderingTable(IRB), Ordering);
+    CB.setArgOperand(3, NewOrdering);
+
+    IRBuilder<> NextIRB(CB.getNextNode());
+    NextIRB.SetCurrentDebugLocation(CB.getDebugLoc());
+
+    Value *SrcShadowPtr, *SrcOriginPtr;
+    std::tie(SrcShadowPtr, SrcOriginPtr) =
+        getShadowOriginPtr(SrcPtr, NextIRB, NextIRB.getInt8Ty(), Align(1),
+                           /*isStore*/ false);
+    Value *DstShadowPtr =
+        getShadowOriginPtr(DstPtr, NextIRB, NextIRB.getInt8Ty(), Align(1),
+                           /*isStore*/ true)
+            .first;
+
+    NextIRB.CreateMemCpy(DstShadowPtr, Align(1), SrcShadowPtr, Align(1), Size);
+    if (MS.TrackOrigins) {
+      Value *SrcOrigin = NextIRB.CreateAlignedLoad(MS.OriginTy, SrcOriginPtr,
+                                                   kMinOriginAlignment);
+      Value *NewOrigin = updateOrigin(SrcOrigin, NextIRB);
+      NextIRB.CreateCall(MS.MsanSetOriginFn, {DstPtr, Size, NewOrigin});
+    }
+  }
+
+  void visitLibAtomicStore(CallBase &CB) {
+    IRBuilder<> IRB(&CB);
+    Value *Size = CB.getArgOperand(0);
+    Value *DstPtr = CB.getArgOperand(2);
+    Value *Ordering = CB.getArgOperand(3);
+    // Convert the call to have at least Release ordering to make sure
+    // the shadow operations aren't reordered after it.
+    Value *NewOrdering =
+        IRB.CreateExtractElement(makeAddReleaseOrderingTable(IRB), Ordering);
+    CB.setArgOperand(3, NewOrdering);
+
+    Value *DstShadowPtr =
+        getShadowOriginPtr(DstPtr, IRB, IRB.getInt8Ty(), Align(1),
+                           /*isStore*/ true)
+            .first;
+
+    // Atomic store always paints clean shadow/origin. See file header.
+    IRB.CreateMemSet(DstShadowPtr, getCleanShadow(IRB.getInt8Ty()), Size,
+                     Align(1));
+  }
+
   void visitCallBase(CallBase &CB) {
     assert(!CB.getMetadata("nosanitize"));
     if (CB.isInlineAsm()) {
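
For orientation, the size-generic libatomic entry points these visitors target. The prototypes below are an assumption based on the GCC/clang libatomic ABI, not something the patch itself declares; under that reading, SrcPtr and DstPtr in visitLibAtomicLoad are the atomic object and the caller's return buffer, and the shadow memcpy mirrors the data copy byte for byte:

    #include <cstddef>
    // Assumed C prototypes of the size-generic libatomic calls (sketch):
    extern "C" void __atomic_load(size_t size, void *ptr, void *ret,
                                  int memorder);
    extern "C" void __atomic_store(size_t size, void *ptr, void *val,
                                   int memorder);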
@@ -3417,6 +3513,23 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
       visitInstruction(CB);
       return;
     }
+    LibFunc LF;
+    if (TLI->getLibFunc(CB, LF)) {
+      // libatomic.a functions need to have special handling because there isn't
+      // a good way to intercept them or compile the library with
+      // instrumentation.
+      switch (LF) {
+      case LibFunc_atomic_load:
+        visitLibAtomicLoad(CB);
+        return;
+      case LibFunc_atomic_store:
+        visitLibAtomicStore(CB);
+        return;
+      default:
+        break;
+      }
+    }
+
     if (auto *Call = dyn_cast<CallInst>(&CB)) {
       assert(!isa<IntrinsicInst>(Call) && "intrinsics are handled elsewhere");
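
Usage context: these size-generic calls typically appear when the compiler cannot emit a native atomic operation, e.g. for an oversized std::atomic. A hedged example of user code that, on common targets, lowers to the __atomic_load/__atomic_store calls handled above (whether it actually does depends on target and ABI):

    #include <atomic>
    struct Big { char bytes[32]; };       // too wide for a native atomic op
    std::atomic<Big> g;                   // typically lock-based via libatomic
    Big readIt() { return g.load(); }     // may lower to __atomic_load(32, ...)
    void writeIt(Big v) { g.store(v); }   // may lower to __atomic_store(32, ...)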