@@ -1122,6 +1122,104 @@ static llvm::Instruction::CastOps getLLVMCastOp(Instruction::Opcode Opc) {
  }
}

+void AtomicCmpXchgInst::setSyncScopeID(SyncScope::ID SSID) {
+  Ctx.getTracker()
+      .emplaceIfTracking<GenericSetter<&AtomicCmpXchgInst::getSyncScopeID,
+                                       &AtomicCmpXchgInst::setSyncScopeID>>(
+          this);
+  cast<llvm::AtomicCmpXchgInst>(Val)->setSyncScopeID(SSID);
+}
+
+Value *AtomicCmpXchgInst::getPointerOperand() {
+  return Ctx.getValue(cast<llvm::AtomicCmpXchgInst>(Val)->getPointerOperand());
+}
+
+Value *AtomicCmpXchgInst::getCompareOperand() {
+  return Ctx.getValue(cast<llvm::AtomicCmpXchgInst>(Val)->getCompareOperand());
+}
+
+Value *AtomicCmpXchgInst::getNewValOperand() {
+  return Ctx.getValue(cast<llvm::AtomicCmpXchgInst>(Val)->getNewValOperand());
+}
+
+AtomicCmpXchgInst *
+AtomicCmpXchgInst::create(Value *Ptr, Value *Cmp, Value *New, MaybeAlign Align,
+                          AtomicOrdering SuccessOrdering,
+                          AtomicOrdering FailureOrdering, BBIterator WhereIt,
+                          BasicBlock *WhereBB, Context &Ctx, SyncScope::ID SSID,
+                          const Twine &Name) {
+  auto &Builder = Ctx.getLLVMIRBuilder();
+  if (WhereIt == WhereBB->end())
+    Builder.SetInsertPoint(cast<llvm::BasicBlock>(WhereBB->Val));
+  else
+    Builder.SetInsertPoint((*WhereIt).getTopmostLLVMInstruction());
+  auto *LLVMAtomicCmpXchg =
+      Builder.CreateAtomicCmpXchg(Ptr->Val, Cmp->Val, New->Val, Align,
+                                  SuccessOrdering, FailureOrdering, SSID);
+  LLVMAtomicCmpXchg->setName(Name);
+  return Ctx.createAtomicCmpXchgInst(LLVMAtomicCmpXchg);
+}
+
+AtomicCmpXchgInst *AtomicCmpXchgInst::create(Value *Ptr, Value *Cmp, Value *New,
+                                             MaybeAlign Align,
+                                             AtomicOrdering SuccessOrdering,
+                                             AtomicOrdering FailureOrdering,
+                                             Instruction *InsertBefore,
+                                             Context &Ctx, SyncScope::ID SSID,
+                                             const Twine &Name) {
+  return create(Ptr, Cmp, New, Align, SuccessOrdering, FailureOrdering,
+                InsertBefore->getIterator(), InsertBefore->getParent(), Ctx,
+                SSID, Name);
+}
+
+AtomicCmpXchgInst *AtomicCmpXchgInst::create(Value *Ptr, Value *Cmp, Value *New,
+                                             MaybeAlign Align,
+                                             AtomicOrdering SuccessOrdering,
+                                             AtomicOrdering FailureOrdering,
+                                             BasicBlock *InsertAtEnd,
+                                             Context &Ctx, SyncScope::ID SSID,
+                                             const Twine &Name) {
+  return create(Ptr, Cmp, New, Align, SuccessOrdering, FailureOrdering,
+                InsertAtEnd->end(), InsertAtEnd, Ctx, SSID, Name);
+}
+
+void AtomicCmpXchgInst::setAlignment(Align Align) {
+  Ctx.getTracker()
+      .emplaceIfTracking<GenericSetter<&AtomicCmpXchgInst::getAlign,
+                                       &AtomicCmpXchgInst::setAlignment>>(this);
+  cast<llvm::AtomicCmpXchgInst>(Val)->setAlignment(Align);
+}
+
+void AtomicCmpXchgInst::setVolatile(bool V) {
+  Ctx.getTracker()
+      .emplaceIfTracking<GenericSetter<&AtomicCmpXchgInst::isVolatile,
+                                       &AtomicCmpXchgInst::setVolatile>>(this);
+  cast<llvm::AtomicCmpXchgInst>(Val)->setVolatile(V);
+}
+
+void AtomicCmpXchgInst::setWeak(bool IsWeak) {
+  Ctx.getTracker()
+      .emplaceIfTracking<GenericSetter<&AtomicCmpXchgInst::isWeak,
+                                       &AtomicCmpXchgInst::setWeak>>(this);
+  cast<llvm::AtomicCmpXchgInst>(Val)->setWeak(IsWeak);
+}
+
+void AtomicCmpXchgInst::setSuccessOrdering(AtomicOrdering Ordering) {
+  Ctx.getTracker()
+      .emplaceIfTracking<GenericSetter<&AtomicCmpXchgInst::getSuccessOrdering,
+                                       &AtomicCmpXchgInst::setSuccessOrdering>>(
+          this);
+  cast<llvm::AtomicCmpXchgInst>(Val)->setSuccessOrdering(Ordering);
+}
+
+void AtomicCmpXchgInst::setFailureOrdering(AtomicOrdering Ordering) {
+  Ctx.getTracker()
+      .emplaceIfTracking<GenericSetter<&AtomicCmpXchgInst::getFailureOrdering,
+                                       &AtomicCmpXchgInst::setFailureOrdering>>(
+          this);
+  cast<llvm::AtomicCmpXchgInst>(Val)->setFailureOrdering(Ordering);
+}
+
AllocaInst *AllocaInst::create(Type *Ty, unsigned AddrSpace, BBIterator WhereIt,
                               BasicBlock *WhereBB, Context &Ctx,
                               Value *ArraySize, const Twine &Name) {
@@ -1433,6 +1531,12 @@ Value *Context::getOrCreateValueInternal(llvm::Value *LLVMV, llvm::User *U) {
        new GetElementPtrInst(LLVMGEP, *this));
    return It->second.get();
  }
+  case llvm::Instruction::AtomicCmpXchg: {
+    auto *LLVMAtomicCmpXchg = cast<llvm::AtomicCmpXchgInst>(LLVMV);
+    It->second = std::unique_ptr<AtomicCmpXchgInst>(
+        new AtomicCmpXchgInst(LLVMAtomicCmpXchg, *this));
+    return It->second.get();
+  }
  case llvm::Instruction::Alloca: {
    auto *LLVMAlloca = cast<llvm::AllocaInst>(LLVMV);
    It->second = std::unique_ptr<AllocaInst>(new AllocaInst(LLVMAlloca, *this));
@@ -1550,6 +1654,12 @@ Context::createGetElementPtrInst(llvm::GetElementPtrInst *I) {
      std::unique_ptr<GetElementPtrInst>(new GetElementPtrInst(I, *this));
  return cast<GetElementPtrInst>(registerValue(std::move(NewPtr)));
}
+AtomicCmpXchgInst *
+Context::createAtomicCmpXchgInst(llvm::AtomicCmpXchgInst *I) {
+  auto NewPtr =
+      std::unique_ptr<AtomicCmpXchgInst>(new AtomicCmpXchgInst(I, *this));
+  return cast<AtomicCmpXchgInst>(registerValue(std::move(NewPtr)));
+}
AllocaInst *Context::createAllocaInst(llvm::AllocaInst *I) {
  auto NewPtr = std::unique_ptr<AllocaInst>(new AllocaInst(I, *this));
  return cast<AllocaInst>(registerValue(std::move(NewPtr)));
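
Usage note (not part of the patch): a minimal sketch of how the new create() overload and the tracked setters might be called from client code. It assumes the classes above live under llvm::sandboxir and are exposed through a SandboxIR header such as llvm/SandboxIR/SandboxIR.h (both assumptions), and that the caller already has sandboxir Value operands Ptr, Cmp and New, a destination BasicBlock *BB, and a Context Ctx. The alignment, orderings, sync scope, and the helper name emitCmpXchg are illustrative.

// Illustrative sketch only; header path and namespace are assumptions.
#include "llvm/SandboxIR/SandboxIR.h"

static llvm::sandboxir::AtomicCmpXchgInst *
emitCmpXchg(llvm::sandboxir::Value *Ptr, llvm::sandboxir::Value *Cmp,
            llvm::sandboxir::Value *New, llvm::sandboxir::BasicBlock *BB,
            llvm::sandboxir::Context &Ctx) {
  // Create the cmpxchg at the end of BB via the InsertAtEnd overload added
  // in this patch: (Ptr, Cmp, New, Align, SuccessOrdering, FailureOrdering,
  // InsertAtEnd, Ctx, SSID, Name).
  auto *CmpXchg = llvm::sandboxir::AtomicCmpXchgInst::create(
      Ptr, Cmp, New, llvm::MaybeAlign(8),
      llvm::AtomicOrdering::SequentiallyConsistent,
      llvm::AtomicOrdering::Monotonic,
      /*InsertAtEnd=*/BB, Ctx, llvm::SyncScope::System, "cmpxchg");
  // The setters route through Ctx.getTracker(), so this change can be rolled
  // back when change tracking is enabled.
  CmpXchg->setWeak(true);
  return CmpXchg;
}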