Commit cbd4596

Recommit "[RISCV] Improve constant materialization to end with 'not' if the cons… (#66950)"
With MC test updates. Original commit message: We can invert the value and treat it as if it had leading zeroes.
1 parent 69074bf commit cbd4596
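
To illustrate the idea from the commit message (a hedged sketch of the arithmetic only, not the in-tree algorithm): when a negative constant's bitwise complement has many leading zeros, the complement can be built with the existing shift-left/materialize/SRLI strategy, and a final not (xori rd, rs, -1) restores the original value. The variable names and the standalone main() below are illustrative; the constant is taken from the updated tests.

#include <cassert>
#include <cstdint>

int main() {
  // Constant from the updated tests: 0xE000_0000_01FF_FFFF (negative as i64).
  uint64_t Val = 0xE000000001FFFFFFULL;

  // Its complement has many leading zeros, so the positive-value strategy
  // applies to it.
  uint64_t Inverted = ~Val;                  // 0x1FFFFFFFFE000000

  // lui a0, 983040 loads 983040 << 12 = 0xF000_0000, sign-extended on RV64.
  uint64_t AfterLui = 0xFFFFFFFFF0000000ULL;

  // srli a0, a0, 3 shifts zeros back in from the top, giving the complement.
  uint64_t AfterSrli = AfterLui >> 3;        // 0x1FFFFFFFFE000000
  assert(AfterSrli == Inverted);

  // A final not (xori a0, a0, -1) recovers the original constant:
  // three instructions where the old sequence needed five.
  assert(~AfterSrli == Val);
  return 0;
}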

File tree: 3 files changed, +135 −135 lines

llvm/lib/Target/RISCV/MCTargetDesc/RISCVMatInt.cpp

Lines changed: 62 additions & 36 deletions
@@ -171,6 +171,57 @@ static unsigned extractRotateInfo(int64_t Val) {
   return 0;
 }
 
+static void generateInstSeqLeadingZeros(int64_t Val,
+                                        const FeatureBitset &ActiveFeatures,
+                                        RISCVMatInt::InstSeq &Res) {
+  assert(Val > 0 && "Expected postive val");
+
+  unsigned LeadingZeros = llvm::countl_zero((uint64_t)Val);
+  uint64_t ShiftedVal = (uint64_t)Val << LeadingZeros;
+  // Fill in the bits that will be shifted out with 1s. An example where this
+  // helps is trailing one masks with 32 or more ones. This will generate
+  // ADDI -1 and an SRLI.
+  ShiftedVal |= maskTrailingOnes<uint64_t>(LeadingZeros);
+
+  RISCVMatInt::InstSeq TmpSeq;
+  generateInstSeqImpl(ShiftedVal, ActiveFeatures, TmpSeq);
+
+  // Keep the new sequence if it is an improvement or the original is empty.
+  if ((TmpSeq.size() + 1) < Res.size() ||
+      (Res.empty() && TmpSeq.size() < 8)) {
+    TmpSeq.emplace_back(RISCV::SRLI, LeadingZeros);
+    Res = TmpSeq;
+  }
+
+  // Some cases can benefit from filling the lower bits with zeros instead.
+  ShiftedVal &= maskTrailingZeros<uint64_t>(LeadingZeros);
+  TmpSeq.clear();
+  generateInstSeqImpl(ShiftedVal, ActiveFeatures, TmpSeq);
+
+  // Keep the new sequence if it is an improvement or the original is empty.
+  if ((TmpSeq.size() + 1) < Res.size() ||
+      (Res.empty() && TmpSeq.size() < 8)) {
+    TmpSeq.emplace_back(RISCV::SRLI, LeadingZeros);
+    Res = TmpSeq;
+  }
+
+  // If we have exactly 32 leading zeros and Zba, we can try using zext.w at
+  // the end of the sequence.
+  if (LeadingZeros == 32 && ActiveFeatures[RISCV::FeatureStdExtZba]) {
+    // Try replacing upper bits with 1.
+    uint64_t LeadingOnesVal = Val | maskLeadingOnes<uint64_t>(LeadingZeros);
+    TmpSeq.clear();
+    generateInstSeqImpl(LeadingOnesVal, ActiveFeatures, TmpSeq);
+
+    // Keep the new sequence if it is an improvement.
+    if ((TmpSeq.size() + 1) < Res.size() ||
+        (Res.empty() && TmpSeq.size() < 8)) {
+      TmpSeq.emplace_back(RISCV::ADD_UW, 0);
+      Res = TmpSeq;
+    }
+  }
+}
+
 namespace llvm::RISCVMatInt {
 InstSeq generateInstSeq(int64_t Val, const FeatureBitset &ActiveFeatures) {
   RISCVMatInt::InstSeq Res;
@@ -210,47 +261,21 @@ InstSeq generateInstSeq(int64_t Val, const FeatureBitset &ActiveFeatures) {
   // with no leading zeros and use a final SRLI to restore them.
   if (Val > 0) {
     assert(Res.size() > 2 && "Expected longer sequence");
-    unsigned LeadingZeros = llvm::countl_zero((uint64_t)Val);
-    uint64_t ShiftedVal = (uint64_t)Val << LeadingZeros;
-    // Fill in the bits that will be shifted out with 1s. An example where this
-    // helps is trailing one masks with 32 or more ones. This will generate
-    // ADDI -1 and an SRLI.
-    ShiftedVal |= maskTrailingOnes<uint64_t>(LeadingZeros);
+    generateInstSeqLeadingZeros(Val, ActiveFeatures, Res);
+  }
 
+  // If the constant is negative, trying inverting and using our trailing zero
+  // optimizations. Use an xori to invert the final value.
+  if (Val < 0 && Res.size() > 3) {
+    uint64_t InvertedVal = ~(uint64_t)Val;
     RISCVMatInt::InstSeq TmpSeq;
-    generateInstSeqImpl(ShiftedVal, ActiveFeatures, TmpSeq);
+    generateInstSeqLeadingZeros(InvertedVal, ActiveFeatures, TmpSeq);
 
-    // Keep the new sequence if it is an improvement.
-    if ((TmpSeq.size() + 1) < Res.size()) {
-      TmpSeq.emplace_back(RISCV::SRLI, LeadingZeros);
-      Res = TmpSeq;
-    }
-
-    // Some cases can benefit from filling the lower bits with zeros instead.
-    ShiftedVal &= maskTrailingZeros<uint64_t>(LeadingZeros);
-    TmpSeq.clear();
-    generateInstSeqImpl(ShiftedVal, ActiveFeatures, TmpSeq);
-
-    // Keep the new sequence if it is an improvement.
-    if ((TmpSeq.size() + 1) < Res.size()) {
-      TmpSeq.emplace_back(RISCV::SRLI, LeadingZeros);
+    // Keep it if we found a sequence that is smaller after inverting.
+    if (!TmpSeq.empty() && (TmpSeq.size() + 1) < Res.size()) {
+      TmpSeq.emplace_back(RISCV::XORI, -1);
       Res = TmpSeq;
     }
-
-    // If we have exactly 32 leading zeros and Zba, we can try using zext.w at
-    // the end of the sequence.
-    if (LeadingZeros == 32 && ActiveFeatures[RISCV::FeatureStdExtZba]) {
-      // Try replacing upper bits with 1.
-      uint64_t LeadingOnesVal = Val | maskLeadingOnes<uint64_t>(LeadingZeros);
-      TmpSeq.clear();
-      generateInstSeqImpl(LeadingOnesVal, ActiveFeatures, TmpSeq);
-
-      // Keep the new sequence if it is an improvement.
-      if ((TmpSeq.size() + 1) < Res.size()) {
-        TmpSeq.emplace_back(RISCV::ADD_UW, 0);
-        Res = TmpSeq;
-      }
-    }
   }
 
   // If the Low and High halves are the same, use pack. The pack instruction
@@ -429,6 +454,7 @@ OpndKind Inst::getOpndKind() const {
     return RISCVMatInt::RegReg;
   case RISCV::ADDI:
   case RISCV::ADDIW:
+  case RISCV::XORI:
  case RISCV::SLLI:
  case RISCV::SRLI:
  case RISCV::SLLI_UW:
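
For readers outside the LLVM tree, here is a hedged, self-contained sketch of the bit manipulation that generateInstSeqLeadingZeros performs before recursing, using C++20 <bit> in place of the LLVM helpers (llvm::countl_zero, maskTrailingOnes, maskTrailingZeros). The names normalizeLeadingZeros and Candidates are local illustrative names, not LLVM APIs, and the real code additionally compares sequence lengths and handles the Zba zext.w case.

#include <bit>
#include <cassert>
#include <cstdint>

// Shift a positive value left until its top bit is set and return the two
// candidates the helper tries: vacated low bits filled with ones, and filled
// with zeros. A materialized candidate is shifted back with SRLI(LeadingZeros).
struct Candidates {
  unsigned LeadingZeros;
  uint64_t FilledWithOnes;
  uint64_t FilledWithZeros;
};

static Candidates normalizeLeadingZeros(int64_t Val) {
  assert(Val > 0 && "Expected positive val");
  unsigned LeadingZeros = std::countl_zero(static_cast<uint64_t>(Val));
  uint64_t Shifted = static_cast<uint64_t>(Val) << LeadingZeros;
  // LeadingZeros >= 1 because Val > 0, so the shift below is well defined.
  uint64_t TrailingOnes = ~uint64_t{0} >> (64 - LeadingZeros);
  return {LeadingZeros, Shifted | TrailingOnes, Shifted};
}

int main() {
  // 0x0FFFFFFFEDCBA987 is ~0xF000000012345678, the inverted imm_2reg_1 value.
  Candidates C = normalizeLeadingZeros(0x0FFFFFFFEDCBA987);
  assert(C.LeadingZeros == 4);
  assert(C.FilledWithOnes == 0xFFFFFFFEDCBA987FULL);
  assert(C.FilledWithZeros == 0xFFFFFFFEDCBA9870ULL);
  return 0;
}

Per the comment in the diff, the ones-filled form is what helps trailing-one masks with 32 or more ones (an ADDI -1 plus an SRLI).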

llvm/test/CodeGen/RISCV/imm.ll

Lines changed: 61 additions & 79 deletions
@@ -1058,47 +1058,37 @@ define i64 @imm_end_xori_1() nounwind {
 ;
 ; RV64I-LABEL: imm_end_xori_1:
 ; RV64I: # %bb.0:
-; RV64I-NEXT: li a0, -1
-; RV64I-NEXT: slli a0, a0, 36
-; RV64I-NEXT: addi a0, a0, 1
-; RV64I-NEXT: slli a0, a0, 25
-; RV64I-NEXT: addi a0, a0, -1
+; RV64I-NEXT: lui a0, 983040
+; RV64I-NEXT: srli a0, a0, 3
+; RV64I-NEXT: not a0, a0
 ; RV64I-NEXT: ret
 ;
 ; RV64IZBA-LABEL: imm_end_xori_1:
 ; RV64IZBA: # %bb.0:
-; RV64IZBA-NEXT: li a0, -1
-; RV64IZBA-NEXT: slli a0, a0, 36
-; RV64IZBA-NEXT: addi a0, a0, 1
-; RV64IZBA-NEXT: slli a0, a0, 25
-; RV64IZBA-NEXT: addi a0, a0, -1
+; RV64IZBA-NEXT: lui a0, 983040
+; RV64IZBA-NEXT: srli a0, a0, 3
+; RV64IZBA-NEXT: not a0, a0
 ; RV64IZBA-NEXT: ret
 ;
 ; RV64IZBB-LABEL: imm_end_xori_1:
 ; RV64IZBB: # %bb.0:
-; RV64IZBB-NEXT: li a0, -1
-; RV64IZBB-NEXT: slli a0, a0, 36
-; RV64IZBB-NEXT: addi a0, a0, 1
-; RV64IZBB-NEXT: slli a0, a0, 25
-; RV64IZBB-NEXT: addi a0, a0, -1
+; RV64IZBB-NEXT: lui a0, 983040
+; RV64IZBB-NEXT: srli a0, a0, 3
+; RV64IZBB-NEXT: not a0, a0
 ; RV64IZBB-NEXT: ret
 ;
 ; RV64IZBS-LABEL: imm_end_xori_1:
 ; RV64IZBS: # %bb.0:
-; RV64IZBS-NEXT: li a0, -1
-; RV64IZBS-NEXT: slli a0, a0, 36
-; RV64IZBS-NEXT: addi a0, a0, 1
-; RV64IZBS-NEXT: slli a0, a0, 25
-; RV64IZBS-NEXT: addi a0, a0, -1
+; RV64IZBS-NEXT: lui a0, 983040
+; RV64IZBS-NEXT: srli a0, a0, 3
+; RV64IZBS-NEXT: not a0, a0
 ; RV64IZBS-NEXT: ret
 ;
 ; RV64IXTHEADBB-LABEL: imm_end_xori_1:
 ; RV64IXTHEADBB: # %bb.0:
-; RV64IXTHEADBB-NEXT: li a0, -1
-; RV64IXTHEADBB-NEXT: slli a0, a0, 36
-; RV64IXTHEADBB-NEXT: addi a0, a0, 1
-; RV64IXTHEADBB-NEXT: slli a0, a0, 25
-; RV64IXTHEADBB-NEXT: addi a0, a0, -1
+; RV64IXTHEADBB-NEXT: lui a0, 983040
+; RV64IXTHEADBB-NEXT: srli a0, a0, 3
+; RV64IXTHEADBB-NEXT: not a0, a0
 ; RV64IXTHEADBB-NEXT: ret
   ret i64 -2305843009180139521 ; 0xE000_0000_01FF_FFFF
 }
@@ -1174,13 +1164,12 @@ define i64 @imm_2reg_1() nounwind {
 ;
 ; RV64-NOPOOL-LABEL: imm_2reg_1:
 ; RV64-NOPOOL: # %bb.0:
-; RV64-NOPOOL-NEXT: li a0, -1
-; RV64-NOPOOL-NEXT: slli a0, a0, 35
-; RV64-NOPOOL-NEXT: addi a0, a0, 9
+; RV64-NOPOOL-NEXT: lui a0, 1048430
+; RV64-NOPOOL-NEXT: addiw a0, a0, 1493
 ; RV64-NOPOOL-NEXT: slli a0, a0, 13
-; RV64-NOPOOL-NEXT: addi a0, a0, 837
-; RV64-NOPOOL-NEXT: slli a0, a0, 12
-; RV64-NOPOOL-NEXT: addi a0, a0, 1656
+; RV64-NOPOOL-NEXT: addi a0, a0, -1921
+; RV64-NOPOOL-NEXT: srli a0, a0, 4
+; RV64-NOPOOL-NEXT: not a0, a0
 ; RV64-NOPOOL-NEXT: ret
 ;
 ; RV64I-POOL-LABEL: imm_2reg_1:
@@ -1191,45 +1180,42 @@ define i64 @imm_2reg_1() nounwind {
 ;
 ; RV64IZBA-LABEL: imm_2reg_1:
 ; RV64IZBA: # %bb.0:
-; RV64IZBA-NEXT: li a0, -1
-; RV64IZBA-NEXT: slli a0, a0, 35
-; RV64IZBA-NEXT: addi a0, a0, 9
+; RV64IZBA-NEXT: lui a0, 1048430
+; RV64IZBA-NEXT: addiw a0, a0, 1493
 ; RV64IZBA-NEXT: slli a0, a0, 13
-; RV64IZBA-NEXT: addi a0, a0, 837
-; RV64IZBA-NEXT: slli a0, a0, 12
-; RV64IZBA-NEXT: addi a0, a0, 1656
+; RV64IZBA-NEXT: addi a0, a0, -1921
+; RV64IZBA-NEXT: srli a0, a0, 4
+; RV64IZBA-NEXT: not a0, a0
 ; RV64IZBA-NEXT: ret
 ;
 ; RV64IZBB-LABEL: imm_2reg_1:
 ; RV64IZBB: # %bb.0:
-; RV64IZBB-NEXT: li a0, -1
-; RV64IZBB-NEXT: slli a0, a0, 35
-; RV64IZBB-NEXT: addi a0, a0, 9
+; RV64IZBB-NEXT: lui a0, 1048430
+; RV64IZBB-NEXT: addiw a0, a0, 1493
 ; RV64IZBB-NEXT: slli a0, a0, 13
-; RV64IZBB-NEXT: addi a0, a0, 837
-; RV64IZBB-NEXT: slli a0, a0, 12
-; RV64IZBB-NEXT: addi a0, a0, 1656
+; RV64IZBB-NEXT: addi a0, a0, -1921
+; RV64IZBB-NEXT: srli a0, a0, 4
+; RV64IZBB-NEXT: not a0, a0
 ; RV64IZBB-NEXT: ret
 ;
 ; RV64IZBS-LABEL: imm_2reg_1:
 ; RV64IZBS: # %bb.0:
-; RV64IZBS-NEXT: lui a0, 74565
-; RV64IZBS-NEXT: addiw a0, a0, 1656
-; RV64IZBS-NEXT: bseti a0, a0, 60
-; RV64IZBS-NEXT: bseti a0, a0, 61
-; RV64IZBS-NEXT: bseti a0, a0, 62
-; RV64IZBS-NEXT: bseti a0, a0, 63
+; RV64IZBS-NEXT: lui a0, 1048430
+; RV64IZBS-NEXT: addiw a0, a0, 1493
+; RV64IZBS-NEXT: slli a0, a0, 13
+; RV64IZBS-NEXT: addi a0, a0, -1921
+; RV64IZBS-NEXT: srli a0, a0, 4
+; RV64IZBS-NEXT: not a0, a0
 ; RV64IZBS-NEXT: ret
 ;
 ; RV64IXTHEADBB-LABEL: imm_2reg_1:
 ; RV64IXTHEADBB: # %bb.0:
-; RV64IXTHEADBB-NEXT: li a0, -1
-; RV64IXTHEADBB-NEXT: slli a0, a0, 35
-; RV64IXTHEADBB-NEXT: addi a0, a0, 9
+; RV64IXTHEADBB-NEXT: lui a0, 1048430
+; RV64IXTHEADBB-NEXT: addiw a0, a0, 1493
 ; RV64IXTHEADBB-NEXT: slli a0, a0, 13
-; RV64IXTHEADBB-NEXT: addi a0, a0, 837
-; RV64IXTHEADBB-NEXT: slli a0, a0, 12
-; RV64IXTHEADBB-NEXT: addi a0, a0, 1656
+; RV64IXTHEADBB-NEXT: addi a0, a0, -1921
+; RV64IXTHEADBB-NEXT: srli a0, a0, 4
+; RV64IXTHEADBB-NEXT: not a0, a0
 ; RV64IXTHEADBB-NEXT: ret
   ret i64 -1152921504301427080 ; 0xF000_0000_1234_5678
 }
@@ -1724,13 +1710,12 @@ define i64 @imm_neg_9223372034778874949() {
 ;
 ; RV64-NOPOOL-LABEL: imm_neg_9223372034778874949:
 ; RV64-NOPOOL: # %bb.0:
-; RV64-NOPOOL-NEXT: li a0, -1
-; RV64-NOPOOL-NEXT: slli a0, a0, 37
-; RV64-NOPOOL-NEXT: addi a0, a0, 31
+; RV64-NOPOOL-NEXT: lui a0, 1048329
+; RV64-NOPOOL-NEXT: addiw a0, a0, -1911
 ; RV64-NOPOOL-NEXT: slli a0, a0, 12
-; RV64-NOPOOL-NEXT: addi a0, a0, -273
-; RV64-NOPOOL-NEXT: slli a0, a0, 14
-; RV64-NOPOOL-NEXT: addi a0, a0, -1093
+; RV64-NOPOOL-NEXT: addi a0, a0, -1911
+; RV64-NOPOOL-NEXT: srli a0, a0, 1
+; RV64-NOPOOL-NEXT: not a0, a0
 ; RV64-NOPOOL-NEXT: ret
 ;
 ; RV64I-POOL-LABEL: imm_neg_9223372034778874949:
@@ -1741,24 +1726,22 @@ define i64 @imm_neg_9223372034778874949() {
 ;
 ; RV64IZBA-LABEL: imm_neg_9223372034778874949:
 ; RV64IZBA: # %bb.0:
-; RV64IZBA-NEXT: li a0, -1
-; RV64IZBA-NEXT: slli a0, a0, 37
-; RV64IZBA-NEXT: addi a0, a0, 31
+; RV64IZBA-NEXT: lui a0, 1048329
+; RV64IZBA-NEXT: addiw a0, a0, -1911
 ; RV64IZBA-NEXT: slli a0, a0, 12
-; RV64IZBA-NEXT: addi a0, a0, -273
-; RV64IZBA-NEXT: slli a0, a0, 14
-; RV64IZBA-NEXT: addi a0, a0, -1093
+; RV64IZBA-NEXT: addi a0, a0, -1911
+; RV64IZBA-NEXT: srli a0, a0, 1
+; RV64IZBA-NEXT: not a0, a0
 ; RV64IZBA-NEXT: ret
 ;
 ; RV64IZBB-LABEL: imm_neg_9223372034778874949:
 ; RV64IZBB: # %bb.0:
-; RV64IZBB-NEXT: li a0, -1
-; RV64IZBB-NEXT: slli a0, a0, 37
-; RV64IZBB-NEXT: addi a0, a0, 31
+; RV64IZBB-NEXT: lui a0, 1048329
+; RV64IZBB-NEXT: addiw a0, a0, -1911
 ; RV64IZBB-NEXT: slli a0, a0, 12
-; RV64IZBB-NEXT: addi a0, a0, -273
-; RV64IZBB-NEXT: slli a0, a0, 14
-; RV64IZBB-NEXT: addi a0, a0, -1093
+; RV64IZBB-NEXT: addi a0, a0, -1911
+; RV64IZBB-NEXT: srli a0, a0, 1
+; RV64IZBB-NEXT: not a0, a0
 ; RV64IZBB-NEXT: ret
 ;
 ; RV64IZBS-LABEL: imm_neg_9223372034778874949:
@@ -1770,13 +1753,12 @@ define i64 @imm_neg_9223372034778874949() {
 ;
 ; RV64IXTHEADBB-LABEL: imm_neg_9223372034778874949:
 ; RV64IXTHEADBB: # %bb.0:
-; RV64IXTHEADBB-NEXT: li a0, -1
-; RV64IXTHEADBB-NEXT: slli a0, a0, 37
-; RV64IXTHEADBB-NEXT: addi a0, a0, 31
+; RV64IXTHEADBB-NEXT: lui a0, 1048329
+; RV64IXTHEADBB-NEXT: addiw a0, a0, -1911
 ; RV64IXTHEADBB-NEXT: slli a0, a0, 12
-; RV64IXTHEADBB-NEXT: addi a0, a0, -273
-; RV64IXTHEADBB-NEXT: slli a0, a0, 14
-; RV64IXTHEADBB-NEXT: addi a0, a0, -1093
+; RV64IXTHEADBB-NEXT: addi a0, a0, -1911
+; RV64IXTHEADBB-NEXT: srli a0, a0, 1
+; RV64IXTHEADBB-NEXT: not a0, a0
 ; RV64IXTHEADBB-NEXT: ret
   ret i64 -9223372034778874949 ; 0x800000007bbbbbbb
 }
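
The new check lines can be sanity-checked by replaying simplified RV64 semantics on a 64-bit value. The helpers below (sext32, lui, addiw, addi, slli, srli) are local stand-ins that model instruction behavior for this sketch only, not LLVM or assembler APIs; under those assumptions the model reproduces all three constants from the updated tests.

#include <cassert>
#include <cstdint>

// Sign-extend the low 32 bits to 64 bits, as lui and the W-form ops do on RV64.
static int64_t sext32(uint64_t X) { return (int64_t)(int32_t)(uint32_t)X; }
static int64_t lui(uint32_t Imm20)            { return sext32(Imm20 << 12); }
static int64_t addiw(int64_t Rs, int64_t Imm) { return sext32((uint64_t)Rs + (uint64_t)Imm); }
static int64_t addi(int64_t Rs, int64_t Imm)  { return (int64_t)((uint64_t)Rs + (uint64_t)Imm); }
static int64_t slli(int64_t Rs, unsigned Sh)  { return (int64_t)((uint64_t)Rs << Sh); }
static int64_t srli(int64_t Rs, unsigned Sh)  { return (int64_t)((uint64_t)Rs >> Sh); }

int main() {
  // imm_end_xori_1: lui 983040; srli 3; not.
  int64_t A = ~srli(lui(983040), 3);
  assert(A == (int64_t)0xE000000001FFFFFFULL);   // -2305843009180139521

  // imm_2reg_1: lui 1048430; addiw 1493; slli 13; addi -1921; srli 4; not.
  int64_t B = ~srli(addi(slli(addiw(lui(1048430), 1493), 13), -1921), 4);
  assert(B == (int64_t)0xF000000012345678ULL);   // -1152921504301427080

  // imm_neg_9223372034778874949: lui 1048329; addiw -1911; slli 12;
  // addi -1911; srli 1; not.
  int64_t C = ~srli(addi(slli(addiw(lui(1048329), -1911), 12), -1911), 1);
  assert(C == (int64_t)0x800000007BBBBBBBULL);   // -9223372034778874949
  return 0;
}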

llvm/test/MC/RISCV/rv64i-aliases-valid.s

Lines changed: 12 additions & 20 deletions
@@ -153,16 +153,12 @@ li x8, 0xFFFFFFF001
 # CHECK-EXPAND-NEXT: slli s1, s1, 20
 # CHECK-EXPAND-NEXT: addi s1, s1, -3
 li x9, 0x1000FFFFFFFD
-# CHECK-INST: addi a0, zero, -1
-# CHECK-INST-NEXT: slli a0, a0, 36
-# CHECK-INST-NEXT: addi a0, a0, 1
-# CHECK-INST-NEXT: slli a0, a0, 25
-# CHECK-INST-NEXT: addi a0, a0, -1
-# CHECK-ALIAS: li a0, -1
-# CHECK-ALIAS-NEXT: slli a0, a0, 36
-# CHECK-ALIAS-NEXT: addi a0, a0, 1
-# CHECK-ALIAS-NEXT: slli a0, a0, 25
-# CHECK-ALIAS-NEXT: addi a0, a0, -1
+# CHECK-INST: lui a0, 983040
+# CHECK-INST-NEXT: srli a0, a0, 3
+# CHECK-INST-NEXT: xori a0, a0, -1
+# CHECK-ALIAS: lui a0, 983040
+# CHECK-ALIAS-NEXT: srli a0, a0, 3
+# CHECK-ALIAS-NEXT: not a0, a0
 li x10, 0xE000000001FFFFFF
 # CHECK-INST: addi a1, zero, -2047
 # CHECK-INST-NEXT: slli a1, a1, 27
@@ -393,16 +389,12 @@ lla x8, 0xFFFFFFF001
 # CHECK-EXPAND-NEXT: addi s1, s1, -3
 la x9, 0x1000FFFFFFFD
 lla x9, 0x1000FFFFFFFD
-# CHECK-INST: addi a0, zero, -1
-# CHECK-INST-NEXT: slli a0, a0, 36
-# CHECK-INST-NEXT: addi a0, a0, 1
-# CHECK-INST-NEXT: slli a0, a0, 25
-# CHECK-INST-NEXT: addi a0, a0, -1
-# CHECK-ALIAS: li a0, -1
-# CHECK-ALIAS-NEXT: slli a0, a0, 36
-# CHECK-ALIAS-NEXT: addi a0, a0, 1
-# CHECK-ALIAS-NEXT: slli a0, a0, 25
-# CHECK-ALIAS-NEXT: addi a0, a0, -1
+# CHECK-INST: lui a0, 983040
+# CHECK-INST-NEXT: srli a0, a0, 3
+# CHECK-INST-NEXT: xori a0, a0, -1
+# CHECK-ALIAS: lui a0, 983040
+# CHECK-ALIAS-NEXT: srli a0, a0, 3
+# CHECK-ALIAS-NEXT: not a0, a0
 la x10, 0xE000000001FFFFFF
 lla x10, 0xE000000001FFFFFF
 # CHECK-INST: addi a1, zero, -2047
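
A side note on the INST/ALIAS split above: not rd, rs is the standard RISC-V assembler alias for xori rd, rs, -1, since XOR with an all-ones immediate flips every bit, which is why the same instruction prints differently under the two check prefixes. A one-line C++ check of that identity (the value is the intermediate from the sequence above):

#include <cstdint>

// XOR with all-ones is bitwise NOT.
static_assert((0x1FFFFFFFFE000000ULL ^ ~0ULL) == ~0x1FFFFFFFFE000000ULL, "");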
