Commit 1ce3afd

[ValueTracking] Teach computeKnownBits about riscv.vsetvli.opt and riscv.vsetvlimax.opt intrinsics.
These are like the intrinsics without the opt suffix, but don't have side effects. Also add missing test cases for riscv.vsetvlimax.
Parent: c5fa6b1
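
For illustration, a minimal sketch of the kind of fold this enables, mirroring the new tests added below (the function name @vl_roundtrip and the constant arguments are made up for this example). Because computeKnownBits now reports that the VL returned by the opt intrinsics is non-negative and fits in 32 bits, InstCombine can drop a trunc/zext round-trip (or an and with 2147483647) applied to the result:

declare i64 @llvm.riscv.vsetvlimax.opt.i64(i64, i64)

; The upper 32 bits of %vl are known to be zero after this change, so the
; trunc/zext pair below is a no-op and InstCombine can return %vl directly.
define i64 @vl_roundtrip() {
entry:
  %vl = call i64 @llvm.riscv.vsetvlimax.opt.i64(i64 1, i64 1)
  %lo = trunc i64 %vl to i32
  %ext = zext i32 %lo to i64
  ret i64 %ext
}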

2 files changed, 122 insertions(+), 0 deletions(-)
llvm/lib/Analysis/ValueTracking.cpp

Lines changed: 2 additions & 0 deletions
@@ -1735,7 +1735,9 @@ static void computeKnownBitsFromOperator(const Operator *I,
         Known.Zero.setBitsFrom(32);
         break;
       case Intrinsic::riscv_vsetvli:
+      case Intrinsic::riscv_vsetvli_opt:
       case Intrinsic::riscv_vsetvlimax:
+      case Intrinsic::riscv_vsetvlimax_opt:
         // Assume that VL output is positive and would fit in an int32_t.
         // TODO: VLEN might be capped at 16 bits in a future V spec update.
         if (BitWidth >= 32)

llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll

Lines changed: 120 additions & 0 deletions
@@ -3,6 +3,12 @@
 
 declare i32 @llvm.riscv.vsetvli.i32(i32, i32, i32)
 declare i64 @llvm.riscv.vsetvli.i64(i64, i64, i64)
+declare i32 @llvm.riscv.vsetvlimax.i32(i32, i32)
+declare i64 @llvm.riscv.vsetvlimax.i64(i64, i64)
+declare i32 @llvm.riscv.vsetvli.opt.i32(i32, i32, i32)
+declare i64 @llvm.riscv.vsetvli.opt.i64(i64, i64, i64)
+declare i32 @llvm.riscv.vsetvlimax.opt.i32(i32, i32)
+declare i64 @llvm.riscv.vsetvlimax.opt.i64(i64, i64)
 
 define i32 @vsetvli_i32() nounwind {
 ; CHECK-LABEL: @vsetvli_i32(
@@ -41,3 +47,117 @@ entry:
   %2 = zext i32 %1 to i64
   ret i64 %2
 }
+
+define i32 @vsetvlimax_i32() nounwind {
+; CHECK-LABEL: @vsetvlimax_i32(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
+; CHECK-NEXT:    ret i32 [[TMP0]]
+;
+entry:
+  %0 = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
+  %1 = and i32 %0, 2147483647
+  ret i32 %1
+}
+
+define i64 @vsetvlimax_sext_i64() nounwind {
+; CHECK-LABEL: @vsetvlimax_sext_i64(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT:    ret i64 [[TMP0]]
+;
+entry:
+  %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+  %1 = trunc i64 %0 to i32
+  %2 = sext i32 %1 to i64
+  ret i64 %2
+}
+
+define i64 @vsetvlimax_zext_i64() nounwind {
+; CHECK-LABEL: @vsetvlimax_zext_i64(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT:    ret i64 [[TMP0]]
+;
+entry:
+  %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+  %1 = trunc i64 %0 to i32
+  %2 = zext i32 %1 to i64
+  ret i64 %2
+}
+
+define i32 @vsetvli_opt_i32() nounwind {
+; CHECK-LABEL: @vsetvli_opt_i32(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvli.opt.i32(i32 1, i32 1, i32 1)
+; CHECK-NEXT:    ret i32 [[TMP0]]
+;
+entry:
+  %0 = call i32 @llvm.riscv.vsetvli.opt.i32(i32 1, i32 1, i32 1)
+  %1 = and i32 %0, 2147483647
+  ret i32 %1
+}
+
+define i64 @vsetvli_opt_sext_i64() nounwind {
+; CHECK-LABEL: @vsetvli_opt_sext_i64(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.opt.i64(i64 1, i64 1, i64 1)
+; CHECK-NEXT:    ret i64 [[TMP0]]
+;
+entry:
+  %0 = call i64 @llvm.riscv.vsetvli.opt.i64(i64 1, i64 1, i64 1)
+  %1 = trunc i64 %0 to i32
+  %2 = sext i32 %1 to i64
+  ret i64 %2
+}
+
+define i64 @vsetvli_opt_zext_i64() nounwind {
+; CHECK-LABEL: @vsetvli_opt_zext_i64(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.opt.i64(i64 1, i64 1, i64 1)
+; CHECK-NEXT:    ret i64 [[TMP0]]
+;
+entry:
+  %0 = call i64 @llvm.riscv.vsetvli.opt.i64(i64 1, i64 1, i64 1)
+  %1 = trunc i64 %0 to i32
+  %2 = zext i32 %1 to i64
+  ret i64 %2
+}
+
+define i32 @vsetvlimax_opt_i32() nounwind {
+; CHECK-LABEL: @vsetvlimax_opt_i32(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.opt.i32(i32 1, i32 1)
+; CHECK-NEXT:    ret i32 [[TMP0]]
+;
+entry:
+  %0 = call i32 @llvm.riscv.vsetvlimax.opt.i32(i32 1, i32 1)
+  %1 = and i32 %0, 2147483647
+  ret i32 %1
+}
+
+define i64 @vsetvlimax_opt_sext_i64() nounwind {
+; CHECK-LABEL: @vsetvlimax_opt_sext_i64(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.opt.i64(i64 1, i64 1)
+; CHECK-NEXT:    ret i64 [[TMP0]]
+;
+entry:
+  %0 = call i64 @llvm.riscv.vsetvlimax.opt.i64(i64 1, i64 1)
+  %1 = trunc i64 %0 to i32
+  %2 = sext i32 %1 to i64
+  ret i64 %2
+}
+
+define i64 @vsetvlimax_opt_zext_i64() nounwind {
+; CHECK-LABEL: @vsetvlimax_opt_zext_i64(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.opt.i64(i64 1, i64 1)
+; CHECK-NEXT:    ret i64 [[TMP0]]
+;
+entry:
+  %0 = call i64 @llvm.riscv.vsetvlimax.opt.i64(i64 1, i64 1)
+  %1 = trunc i64 %0 to i32
+  %2 = zext i32 %1 to i64
+  ret i64 %2
+}
