
#ifdef AARCH64_AVAILABLE
#include "AArch64Subtarget.h"
+#include "MCTargetDesc/AArch64MCTargetDesc.h"
#endif // AARCH64_AVAILABLE

#ifdef X86_AVAILABLE
@@ -19,6 +20,7 @@
#include "bolt/Rewrite/RewriteInstance.h"
#include "llvm/BinaryFormat/ELF.h"
#include "llvm/DebugInfo/DWARF/DWARFContext.h"
+#include "llvm/MC/MCInstBuilder.h"
#include "llvm/Support/TargetSelect.h"
#include "gtest/gtest.h"

@@ -70,6 +72,20 @@ struct MCPlusBuilderTester : public testing::TestWithParam<Triple::ArchType> {
                            BC->MRI.get(), BC->STI.get())));
  }

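+  // Assert that RegMask has exactly the registers listed in ExpectedRegs set.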
+  void assertRegMask(const BitVector &RegMask,
+                     std::initializer_list<MCPhysReg> ExpectedRegs) {
+    ASSERT_EQ(RegMask.count(), ExpectedRegs.size());
+    for (MCPhysReg Reg : ExpectedRegs)
+      ASSERT_TRUE(RegMask[Reg]) << "Expected " << BC->MRI->getName(Reg) << ".";
+  }
+
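+  // Build an all-zero mask sized for this target, let FillRegMask populate
+  // it, then compare it against ExpectedRegs.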
+  void assertRegMask(std::function<void(BitVector &)> FillRegMask,
+                     std::initializer_list<MCPhysReg> ExpectedRegs) {
+    BitVector RegMask(BC->MRI->getNumRegs());
+    FillRegMask(RegMask);
+    assertRegMask(RegMask, ExpectedRegs);
+  }
+
  void testRegAliases(Triple::ArchType Arch, uint64_t Register,
                      uint64_t *Aliases, size_t Count,
                      bool OnlySmaller = false) {
@@ -107,6 +123,100 @@ TEST_P(MCPlusBuilderTester, AliasSmallerX0) {
  testRegAliases(Triple::aarch64, AArch64::X0, AliasesX0, AliasesX0Count, true);
}

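+// adds writes X0 and implicitly defines NZCV; both (with their aliases) must
+// show up as clobbered/written, while only X5 and its aliases are sources.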
+TEST_P(MCPlusBuilderTester, testAccessedRegsImplicitDef) {
+  if (GetParam() != Triple::aarch64)
+    GTEST_SKIP();
+
+  // adds x0, x5, #42
+  MCInst Inst = MCInstBuilder(AArch64::ADDSXri)
+                    .addReg(AArch64::X0)
+                    .addReg(AArch64::X5)
+                    .addImm(42)
+                    .addImm(0);
+
+  assertRegMask([&](BitVector &BV) { BC->MIB->getClobberedRegs(Inst, BV); },
+                {AArch64::NZCV, AArch64::W0, AArch64::X0, AArch64::W0_HI,
+                 AArch64::X0_X1_X2_X3_X4_X5_X6_X7, AArch64::W0_W1,
+                 AArch64::X0_X1});
+
+  assertRegMask(
+      [&](BitVector &BV) { BC->MIB->getTouchedRegs(Inst, BV); },
+      {AArch64::NZCV, AArch64::W0, AArch64::W5, AArch64::X0, AArch64::X5,
+       AArch64::W0_HI, AArch64::W5_HI, AArch64::X0_X1_X2_X3_X4_X5_X6_X7,
+       AArch64::X2_X3_X4_X5_X6_X7_X8_X9, AArch64::X4_X5_X6_X7_X8_X9_X10_X11,
+       AArch64::W0_W1, AArch64::W4_W5, AArch64::X0_X1, AArch64::X4_X5});
+
+  assertRegMask([&](BitVector &BV) { BC->MIB->getWrittenRegs(Inst, BV); },
+                {AArch64::NZCV, AArch64::W0, AArch64::X0, AArch64::W0_HI});
+
+  assertRegMask([&](BitVector &BV) { BC->MIB->getUsedRegs(Inst, BV); },
+                {AArch64::W5, AArch64::X5, AArch64::W5_HI});
+
+  assertRegMask([&](BitVector &BV) { BC->MIB->getSrcRegs(Inst, BV); },
+                {AArch64::W5, AArch64::X5, AArch64::W5_HI});
+}
+
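+// A conditional branch writes no registers; it only implicitly reads NZCV.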
+TEST_P(MCPlusBuilderTester, testAccessedRegsImplicitUse) {
+  if (GetParam() != Triple::aarch64)
+    GTEST_SKIP();
+
+  // b.eq <label>
+  MCInst Inst =
+      MCInstBuilder(AArch64::Bcc)
+          .addImm(AArch64CC::EQ)
+          .addImm(0); // <label> - should be Expr, but immediate 0 works too.
+
+  assertRegMask([&](BitVector &BV) { BC->MIB->getClobberedRegs(Inst, BV); },
+                {});
+
+  assertRegMask([&](BitVector &BV) { BC->MIB->getTouchedRegs(Inst, BV); },
+                {AArch64::NZCV});
+
+  assertRegMask([&](BitVector &BV) { BC->MIB->getWrittenRegs(Inst, BV); }, {});
+
+  assertRegMask([&](BitVector &BV) { BC->MIB->getUsedRegs(Inst, BV); },
+                {AArch64::NZCV});
+
+  assertRegMask([&](BitVector &BV) { BC->MIB->getSrcRegs(Inst, BV); },
+                {AArch64::NZCV});
+}
+
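+// A post-indexed load writes both the loaded register (X0) and the updated
+// base register (X5), so both appear among the written/clobbered registers.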
+TEST_P(MCPlusBuilderTester, testAccessedRegsMultipleDefs) {
+  if (GetParam() != Triple::aarch64)
+    GTEST_SKIP();
+
+  // ldr x0, [x5], #16
+  MCInst Inst = MCInstBuilder(AArch64::LDRXpost)
+                    .addReg(AArch64::X5)
+                    .addReg(AArch64::X0)
+                    .addReg(AArch64::X5)
+                    .addImm(16);
+
+  assertRegMask(
+      [&](BitVector &BV) { BC->MIB->getClobberedRegs(Inst, BV); },
+      {AArch64::W0, AArch64::W5, AArch64::X0, AArch64::X5, AArch64::W0_HI,
+       AArch64::W5_HI, AArch64::X0_X1_X2_X3_X4_X5_X6_X7,
+       AArch64::X2_X3_X4_X5_X6_X7_X8_X9, AArch64::X4_X5_X6_X7_X8_X9_X10_X11,
+       AArch64::W0_W1, AArch64::W4_W5, AArch64::X0_X1, AArch64::X4_X5});
+
+  assertRegMask(
+      [&](BitVector &BV) { BC->MIB->getTouchedRegs(Inst, BV); },
+      {AArch64::W0, AArch64::W5, AArch64::X0, AArch64::X5, AArch64::W0_HI,
+       AArch64::W5_HI, AArch64::X0_X1_X2_X3_X4_X5_X6_X7,
+       AArch64::X2_X3_X4_X5_X6_X7_X8_X9, AArch64::X4_X5_X6_X7_X8_X9_X10_X11,
+       AArch64::W0_W1, AArch64::W4_W5, AArch64::X0_X1, AArch64::X4_X5});
+
+  assertRegMask([&](BitVector &BV) { BC->MIB->getWrittenRegs(Inst, BV); },
+                {AArch64::W0, AArch64::X0, AArch64::W0_HI, AArch64::W5,
+                 AArch64::X5, AArch64::W5_HI});
+
+  assertRegMask([&](BitVector &BV) { BC->MIB->getUsedRegs(Inst, BV); },
+                {AArch64::W5, AArch64::X5, AArch64::W5_HI});
+
+  assertRegMask([&](BitVector &BV) { BC->MIB->getSrcRegs(Inst, BV); },
+                {AArch64::W5, AArch64::X5, AArch64::W5_HI});
+}
+
#endif // AARCH64_AVAILABLE

#ifdef X86_AVAILABLE