; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4
- ; RUN: llc -o - -mtriple=arm64e-apple-macosx %s | FileCheck %s
+ ; RUN: not --crash llc -o - -mtriple=arm64e-apple-macosx -min-jump-table-entries=2 %s
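+ ; llc is expected to crash here (`not --crash`): the functions below keep so
+ ; many registers live across a jump table that the StoreSwiftAsyncContext
+ ; expansion can be left without the two free scratch registers it needs (see
+ ; the comments on the last two functions).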

target datalayout = "e-m:o-i64:64-i128:128-n32:64-S128"

- define swifttailcc void @test_async_with_jumptable(ptr %src, ptr swiftasync %as) #0 {
- ; CHECK-LABEL: test_async_with_jumptable:
- ; CHECK: ; %bb.0: ; %entry
- ; CHECK-NEXT: orr x29, x29, #0x1000000000000000
- ; CHECK-NEXT: str x19, [sp, #-32]! ; 8-byte Folded Spill
- ; CHECK-NEXT: stp x29, x30, [sp, #16] ; 16-byte Folded Spill
- ; CHECK-NEXT: add x16, sp, #8
- ; CHECK-NEXT: movk x16, #49946, lsl #48
- ; CHECK-NEXT: mov x17, x22
- ; CHECK-NEXT: pacdb x17, x16
- ; CHECK-NEXT: str x17, [sp, #8]
- ; CHECK-NEXT: add x29, sp, #16
- ; CHECK-NEXT: .cfi_def_cfa w29, 16
- ; CHECK-NEXT: .cfi_offset w30, -8
- ; CHECK-NEXT: .cfi_offset w29, -16
- ; CHECK-NEXT: .cfi_offset w19, -32
- ; CHECK-NEXT: ldr x16, [x0]
- ; CHECK-NEXT: mov x20, x22
- ; CHECK-NEXT: mov x22, x0
- ; CHECK-NEXT: mov x19, x20
- ; CHECK-NEXT: cmp x16, #3
- ; CHECK-NEXT: csel x16, x16, xzr, ls
- ; CHECK-NEXT: Lloh0:
- ; CHECK-NEXT: adrp x17, LJTI0_0@PAGE
- ; CHECK-NEXT: Lloh1:
- ; CHECK-NEXT: add x17, x17, LJTI0_0@PAGEOFF
- ; CHECK-NEXT: ldrsw x16, [x17, x16, lsl #2]
- ; CHECK-NEXT: Ltmp0:
- ; CHECK-NEXT: adr x17, Ltmp0
- ; CHECK-NEXT: add x16, x17, x16
- ; CHECK-NEXT: br x16
- ; CHECK-NEXT: LBB0_1: ; %then.2
- ; CHECK-NEXT: mov x19, #0 ; =0x0
- ; CHECK-NEXT: b LBB0_3
- ; CHECK-NEXT: LBB0_2: ; %then.3
- ; CHECK-NEXT: mov x19, x22
- ; CHECK-NEXT: LBB0_3: ; %exit
- ; CHECK-NEXT: bl _foo
- ; CHECK-NEXT: mov x2, x0
- ; CHECK-NEXT: mov x0, x19
- ; CHECK-NEXT: mov x1, x20
- ; CHECK-NEXT: ldp x29, x30, [sp, #16] ; 16-byte Folded Reload
- ; CHECK-NEXT: ldr x19, [sp], #32 ; 8-byte Folded Reload
- ; CHECK-NEXT: and x29, x29, #0xefffffffffffffff
- ; CHECK-NEXT: br x2
- ; CHECK-NEXT: .loh AdrpAdd Lloh0, Lloh1
- ; CHECK-NEXT: .cfi_endproc
- ; CHECK-NEXT: .section __TEXT,__const
- ; CHECK-NEXT: .p2align 2, 0x0
- ; CHECK-NEXT: LJTI0_0:
- ; CHECK-NEXT: .long LBB0_3-Ltmp0
- ; CHECK-NEXT: .long LBB0_1-Ltmp0
- ; CHECK-NEXT: .long LBB0_1-Ltmp0
- ; CHECK-NEXT: .long LBB0_2-Ltmp0
+ define swifttailcc void @test_async_with_jumptable_x16_clobbered(ptr %src, ptr swiftasync %as) #0 {
entry:
+ %x16 = tail call i64 asm "", "={x16}"()
%l = load i64, ptr %src, align 8
switch i64 %l, label %dead [
i64 0, label %exit
@@ -80,6 +28,230 @@ dead: ; preds = %entryresume.5

exit:
%p = phi ptr [ %src, %then.3 ], [ null, %then.2 ], [ %as, %entry ], [ null, %then.1 ]
+ tail call void asm sideeffect "", "{x16}"(i64 %x16)
+ %r = call i64 @foo()
+ %fn = inttoptr i64 %r to ptr
+ musttail call swifttailcc void %fn(ptr swiftasync %src, ptr %p, ptr %as)
+ ret void
+ }
+
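+ ; The CHECK lines deleted above show the context store materializing its
+ ; address and signature in x16/x17 (add/movk, pacdb), so these first two
+ ; tests keep exactly those scratch registers live across the jump table.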
+ define swifttailcc void @test_async_with_jumptable_x17_clobbered(ptr %src, ptr swiftasync %as) #0 {
+ entry:
+ %x17 = tail call i64 asm "", "={x17}"()
+ %l = load i64, ptr %src, align 8
+ switch i64 %l, label %dead [
+ i64 0, label %exit
+ i64 1, label %then.1
+ i64 2, label %then.2
+ i64 3, label %then.3
+ ]
+
+ then.1:
+ br label %exit
+
+ then.2:
+ br label %exit
+
+ then.3:
+ br label %exit
+
+ dead: ; preds = %entryresume.5
+ unreachable
+
+ exit:
+ %p = phi ptr [ %src, %then.3 ], [ null, %then.2 ], [ %as, %entry ], [ null, %then.1 ]
+ tail call void asm sideeffect "", "{x17}"(i64 %x17)
+ %r = call i64 @foo()
+ %fn = inttoptr i64 %r to ptr
+ musttail call swifttailcc void %fn(ptr swiftasync %src, ptr %p, ptr %as)
+ ret void
+ }
+
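+ ; The next tests keep registers other than the x16/x17 pair live (x1, then
+ ; x1 and x9), presumably to check that the expansion's scratch-register
+ ; search also steers clear of arbitrary live registers.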
+ define swifttailcc void @test_async_with_jumptable_x1_clobbered(ptr %src, ptr swiftasync %as) #0 {
+ entry:
+ %x1 = tail call i64 asm "", "={x1}"()
+ %l = load i64, ptr %src, align 8
+ switch i64 %l, label %dead [
+ i64 0, label %exit
+ i64 1, label %then.1
+ i64 2, label %then.2
+ i64 3, label %then.3
+ ]
+
+ then.1:
+ br label %exit
+
+ then.2:
+ br label %exit
+
+ then.3:
+ br label %exit
+
+ dead: ; preds = %entryresume.5
+ unreachable
+
+ exit:
+ %p = phi ptr [ %src, %then.3 ], [ null, %then.2 ], [ %as, %entry ], [ null, %then.1 ]
+ tail call void asm sideeffect "", "{x1}"(i64 %x1)
+ %r = call i64 @foo()
+ %fn = inttoptr i64 %r to ptr
+ musttail call swifttailcc void %fn(ptr swiftasync %src, ptr %p, ptr %as)
+ ret void
+ }
+
+ define swifttailcc void @test_async_with_jumptable_x1_x9_clobbered(ptr %src, ptr swiftasync %as) #0 {
+ entry:
+ %x1 = tail call i64 asm "", "={x1}"()
+ %x9 = tail call i64 asm "", "={x9}"()
+ %l = load i64, ptr %src, align 8
+ switch i64 %l, label %dead [
+ i64 0, label %exit
+ i64 1, label %then.1
+ i64 2, label %then.2
+ i64 3, label %then.3
+ ]
+
+ then.1:
+ br label %exit
+
+ then.2:
+ br label %exit
+
+ then.3:
+ br label %exit
+
+ dead: ; preds = %entryresume.5
+ unreachable
+
+ exit:
+ %p = phi ptr [ %src, %then.3 ], [ null, %then.2 ], [ %as, %entry ], [ null, %then.1 ]
+ tail call void asm sideeffect "", "{x1}"(i64 %x1)
+ tail call void asm sideeffect "", "{x9}"(i64 %x9)
+ %r = call i64 @foo()
+ %fn = inttoptr i64 %r to ptr
+ musttail call swifttailcc void %fn(ptr swiftasync %src, ptr %p, ptr %as)
+ ret void
+ }
+
+ ; There are 2 scratch registers still available, so shrink-wrapping can happen.
+ define swifttailcc void @test_async_with_jumptable_2_available_regs_left(ptr %src, ptr swiftasync %as) #0 {
+ entry:
+ %x1 = tail call i64 asm "", "={x1}"()
+ %x2 = tail call i64 asm "", "={x2}"()
+ %x3 = tail call i64 asm "", "={x3}"()
+ %x4 = tail call i64 asm "", "={x4}"()
+ %x5 = tail call i64 asm "", "={x5}"()
+ %x6 = tail call i64 asm "", "={x6}"()
+ %x7 = tail call i64 asm "", "={x7}"()
+ %x8 = tail call i64 asm "", "={x8}"()
+ %x9 = tail call i64 asm "", "={x9}"()
+ %x11 = tail call i64 asm "", "={x11}"()
+ %x12 = tail call i64 asm "", "={x12}"()
+ %x13 = tail call i64 asm "", "={x13}"()
+ %x14 = tail call i64 asm "", "={x14}"()
+ %x15 = tail call i64 asm "", "={x15}"()
+ %x16 = tail call i64 asm "", "={x16}"()
+ %l = load i64, ptr %src, align 8
+ switch i64 %l, label %dead [
+ i64 0, label %exit
+ i64 1, label %then.1
+ i64 2, label %then.2
+ i64 3, label %then.3
+ ]
+
+ then.1:
+ br label %exit
+
+ then.2:
+ br label %exit
+
+ then.3:
+ br label %exit
+
+ dead: ; preds = %entryresume.5
+ unreachable
+
+ exit:
+ %p = phi ptr [ %src, %then.3 ], [ null, %then.2 ], [ %as, %entry ], [ null, %then.1 ]
+ tail call void asm sideeffect "", "{x1}"(i64 %x1)
+ tail call void asm sideeffect "", "{x2}"(i64 %x2)
+ tail call void asm sideeffect "", "{x3}"(i64 %x3)
+ tail call void asm sideeffect "", "{x4}"(i64 %x4)
+ tail call void asm sideeffect "", "{x5}"(i64 %x5)
+ tail call void asm sideeffect "", "{x6}"(i64 %x6)
+ tail call void asm sideeffect "", "{x7}"(i64 %x7)
+ tail call void asm sideeffect "", "{x8}"(i64 %x8)
+ tail call void asm sideeffect "", "{x9}"(i64 %x9)
+ tail call void asm sideeffect "", "{x11}"(i64 %x11)
+ tail call void asm sideeffect "", "{x12}"(i64 %x12)
+ tail call void asm sideeffect "", "{x13}"(i64 %x13)
+ tail call void asm sideeffect "", "{x14}"(i64 %x14)
+ tail call void asm sideeffect "", "{x15}"(i64 %x15)
+ tail call void asm sideeffect "", "{x16}"(i64 %x16)
+ %r = call i64 @foo()
+ %fn = inttoptr i64 %r to ptr
+ musttail call swifttailcc void %fn(ptr swiftasync %src, ptr %p, ptr %as)
+ ret void
+ }
+
+ ; There is only 1 scratch register left, so shrink-wrapping cannot happen
+ ; because StoreSwiftAsyncContext needs 2 free scratch registers.
+ define swifttailcc void @test_async_with_jumptable_1_available_reg_left(ptr %src, ptr swiftasync %as) #0 {
+ entry:
+ %x1 = tail call i64 asm "", "={x1}"()
+ %x2 = tail call i64 asm "", "={x2}"()
+ %x3 = tail call i64 asm "", "={x3}"()
+ %x4 = tail call i64 asm "", "={x4}"()
+ %x5 = tail call i64 asm "", "={x5}"()
+ %x6 = tail call i64 asm "", "={x6}"()
+ %x7 = tail call i64 asm "", "={x7}"()
+ %x8 = tail call i64 asm "", "={x8}"()
+ %x9 = tail call i64 asm "", "={x9}"()
+ %x11 = tail call i64 asm "", "={x11}"()
+ %x12 = tail call i64 asm "", "={x12}"()
+ %x13 = tail call i64 asm "", "={x13}"()
+ %x14 = tail call i64 asm "", "={x14}"()
+ %x15 = tail call i64 asm "", "={x15}"()
+ %x16 = tail call i64 asm "", "={x16}"()
+ %x17 = tail call i64 asm "", "={x17}"()
+ %l = load i64, ptr %src, align 8
+ switch i64 %l, label %dead [
+ i64 0, label %exit
+ i64 1, label %then.1
+ i64 2, label %then.2
+ i64 3, label %then.3
+ ]
+
+ then.1:
+ br label %exit
+
+ then.2:
+ br label %exit
+
+ then.3:
+ br label %exit
+
+ dead: ; preds = %entryresume.5
+ unreachable
+
+ exit:
+ %p = phi ptr [ %src, %then.3 ], [ null, %then.2 ], [ %as, %entry ], [ null, %then.1 ]
+ tail call void asm sideeffect "", "{x1}"(i64 %x1)
+ tail call void asm sideeffect "", "{x2}"(i64 %x2)
+ tail call void asm sideeffect "", "{x3}"(i64 %x3)
+ tail call void asm sideeffect "", "{x4}"(i64 %x4)
+ tail call void asm sideeffect "", "{x5}"(i64 %x5)
+ tail call void asm sideeffect "", "{x6}"(i64 %x6)
+ tail call void asm sideeffect "", "{x7}"(i64 %x7)
+ tail call void asm sideeffect "", "{x8}"(i64 %x8)
+ tail call void asm sideeffect "", "{x9}"(i64 %x9)
+ tail call void asm sideeffect "", "{x11}"(i64 %x11)
+ tail call void asm sideeffect "", "{x12}"(i64 %x12)
+ tail call void asm sideeffect "", "{x13}"(i64 %x13)
+ tail call void asm sideeffect "", "{x14}"(i64 %x14)
+ tail call void asm sideeffect "", "{x15}"(i64 %x15)
+ tail call void asm sideeffect "", "{x16}"(i64 %x16)
+ tail call void asm sideeffect "", "{x17}"(i64 %x17)
%r = call i64 @foo()
%fn = inttoptr i64 %r to ptr
musttail call swifttailcc void %fn(ptr swiftasync %src, ptr %p, ptr %as)