@@ -10,6 +10,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv1i8
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_MF8_:%[0-9]+]]:vr = PseudoVMV_V_X_MF8 [[DEF]], [[COPY]], -1, 3 /* e8 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVMV_V_X_MF8_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -28,6 +29,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv2i8
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_MF4_:%[0-9]+]]:vr = PseudoVMV_V_X_MF4 [[DEF]], [[COPY]], -1, 3 /* e8 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVMV_V_X_MF4_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -46,6 +48,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv4i8
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_MF2_:%[0-9]+]]:vr = PseudoVMV_V_X_MF2 [[DEF]], [[COPY]], -1, 3 /* e8 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVMV_V_X_MF2_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -64,6 +67,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv8i8
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M1_:%[0-9]+]]:vr = PseudoVMV_V_X_M1 [[DEF]], [[COPY]], -1, 3 /* e8 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVMV_V_X_M1_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -82,6 +86,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv16i8
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M2_:%[0-9]+]]:vrm2 = PseudoVMV_V_X_M2 [[DEF]], [[COPY]], -1, 3 /* e8 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m2 = COPY [[PseudoVMV_V_X_M2_]]
; CHECK-NEXT: PseudoRET implicit $v8m2
@@ -100,6 +105,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv32i8
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M4_:%[0-9]+]]:vrm4 = PseudoVMV_V_X_M4 [[DEF]], [[COPY]], -1, 3 /* e8 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m4 = COPY [[PseudoVMV_V_X_M4_]]
; CHECK-NEXT: PseudoRET implicit $v8m4
@@ -118,6 +124,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv64i8
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M8_:%[0-9]+]]:vrm8 = PseudoVMV_V_X_M8 [[DEF]], [[COPY]], -1, 3 /* e8 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m8 = COPY [[PseudoVMV_V_X_M8_]]
; CHECK-NEXT: PseudoRET implicit $v8m8
@@ -136,6 +143,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv1i16
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_MF4_:%[0-9]+]]:vr = PseudoVMV_V_X_MF4 [[DEF]], [[COPY]], -1, 4 /* e16 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVMV_V_X_MF4_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -154,6 +162,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv2i16
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_MF2_:%[0-9]+]]:vr = PseudoVMV_V_X_MF2 [[DEF]], [[COPY]], -1, 4 /* e16 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVMV_V_X_MF2_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -172,6 +181,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv4i16
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M1_:%[0-9]+]]:vr = PseudoVMV_V_X_M1 [[DEF]], [[COPY]], -1, 4 /* e16 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVMV_V_X_M1_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -190,6 +200,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv8i16
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M2_:%[0-9]+]]:vrm2 = PseudoVMV_V_X_M2 [[DEF]], [[COPY]], -1, 4 /* e16 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m2 = COPY [[PseudoVMV_V_X_M2_]]
; CHECK-NEXT: PseudoRET implicit $v8m2
@@ -208,6 +219,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv16i16
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M4_:%[0-9]+]]:vrm4 = PseudoVMV_V_X_M4 [[DEF]], [[COPY]], -1, 4 /* e16 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m4 = COPY [[PseudoVMV_V_X_M4_]]
; CHECK-NEXT: PseudoRET implicit $v8m4
@@ -226,6 +238,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv32i16
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M8_:%[0-9]+]]:vrm8 = PseudoVMV_V_X_M8 [[DEF]], [[COPY]], -1, 4 /* e16 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m8 = COPY [[PseudoVMV_V_X_M8_]]
; CHECK-NEXT: PseudoRET implicit $v8m8
@@ -244,6 +257,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv1i32
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_MF2_:%[0-9]+]]:vr = PseudoVMV_V_X_MF2 [[DEF]], [[COPY]], -1, 5 /* e32 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVMV_V_X_MF2_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -262,6 +276,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv2i32
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M1_:%[0-9]+]]:vr = PseudoVMV_V_X_M1 [[DEF]], [[COPY]], -1, 5 /* e32 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVMV_V_X_M1_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -280,6 +295,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv4i32
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M2_:%[0-9]+]]:vrm2 = PseudoVMV_V_X_M2 [[DEF]], [[COPY]], -1, 5 /* e32 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m2 = COPY [[PseudoVMV_V_X_M2_]]
; CHECK-NEXT: PseudoRET implicit $v8m2
@@ -298,6 +314,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv8i32
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M4_:%[0-9]+]]:vrm4 = PseudoVMV_V_X_M4 [[DEF]], [[COPY]], -1, 5 /* e32 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m4 = COPY [[PseudoVMV_V_X_M4_]]
; CHECK-NEXT: PseudoRET implicit $v8m4
@@ -316,6 +333,7 @@ body: |
; CHECK-LABEL: name: splat_zero_nxv16i32
; CHECK: [[COPY:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M8_:%[0-9]+]]:vrm8 = PseudoVMV_V_X_M8 [[DEF]], [[COPY]], -1, 5 /* e32 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m8 = COPY [[PseudoVMV_V_X_M8_]]
; CHECK-NEXT: PseudoRET implicit $v8m8
@@ -336,6 +354,7 @@ body: |
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[BuildPairF64Pseudo:%[0-9]+]]:fpr64 = BuildPairF64Pseudo [[COPY]], [[COPY1]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVFMV_V_FPR64_M1_:%[0-9]+]]:vr = PseudoVFMV_V_FPR64_M1 [[DEF]], [[BuildPairF64Pseudo]], -1, 6 /* e64 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVFMV_V_FPR64_M1_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -358,6 +377,7 @@ body: |
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[BuildPairF64Pseudo:%[0-9]+]]:fpr64 = BuildPairF64Pseudo [[COPY]], [[COPY1]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVFMV_V_FPR64_M2_:%[0-9]+]]:vrm2 = PseudoVFMV_V_FPR64_M2 [[DEF]], [[BuildPairF64Pseudo]], -1, 6 /* e64 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m2 = COPY [[PseudoVFMV_V_FPR64_M2_]]
; CHECK-NEXT: PseudoRET implicit $v8m2
@@ -380,6 +400,7 @@ body: |
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[BuildPairF64Pseudo:%[0-9]+]]:fpr64 = BuildPairF64Pseudo [[COPY]], [[COPY1]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVFMV_V_FPR64_M4_:%[0-9]+]]:vrm4 = PseudoVFMV_V_FPR64_M4 [[DEF]], [[BuildPairF64Pseudo]], -1, 6 /* e64 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m4 = COPY [[PseudoVFMV_V_FPR64_M4_]]
; CHECK-NEXT: PseudoRET implicit $v8m4
@@ -402,6 +423,7 @@ body: |
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[BuildPairF64Pseudo:%[0-9]+]]:fpr64 = BuildPairF64Pseudo [[COPY]], [[COPY1]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVFMV_V_FPR64_M8_:%[0-9]+]]:vrm8 = PseudoVFMV_V_FPR64_M8 [[DEF]], [[BuildPairF64Pseudo]], -1, 6 /* e64 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m8 = COPY [[PseudoVFMV_V_FPR64_M8_]]
; CHECK-NEXT: PseudoRET implicit $v8m8
@@ -424,6 +446,7 @@ body: |
; CHECK-NEXT: [[FMV_W_X:%[0-9]+]]:fpr32 = FMV_W_X [[COPY]]
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY [[FMV_W_X]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_MF2_:%[0-9]+]]:vr = PseudoVMV_V_X_MF2 [[DEF]], [[COPY1]], -1, 5 /* e32 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVMV_V_X_MF2_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -445,6 +468,7 @@ body: |
; CHECK-NEXT: [[FMV_W_X:%[0-9]+]]:fpr32 = FMV_W_X [[COPY]]
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY [[FMV_W_X]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M1_:%[0-9]+]]:vr = PseudoVMV_V_X_M1 [[DEF]], [[COPY1]], -1, 5 /* e32 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVMV_V_X_M1_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -466,6 +490,7 @@ body: |
; CHECK-NEXT: [[FMV_W_X:%[0-9]+]]:fpr32 = FMV_W_X [[COPY]]
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY [[FMV_W_X]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M2_:%[0-9]+]]:vrm2 = PseudoVMV_V_X_M2 [[DEF]], [[COPY1]], -1, 5 /* e32 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m2 = COPY [[PseudoVMV_V_X_M2_]]
; CHECK-NEXT: PseudoRET implicit $v8m2
@@ -487,6 +512,7 @@ body: |
; CHECK-NEXT: [[FMV_W_X:%[0-9]+]]:fpr32 = FMV_W_X [[COPY]]
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY [[FMV_W_X]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M4_:%[0-9]+]]:vrm4 = PseudoVMV_V_X_M4 [[DEF]], [[COPY1]], -1, 5 /* e32 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m4 = COPY [[PseudoVMV_V_X_M4_]]
; CHECK-NEXT: PseudoRET implicit $v8m4
@@ -508,6 +534,7 @@ body: |
; CHECK-NEXT: [[FMV_W_X:%[0-9]+]]:fpr32 = FMV_W_X [[COPY]]
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY [[FMV_W_X]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVMV_V_X_M8_:%[0-9]+]]:vrm8 = PseudoVMV_V_X_M8 [[DEF]], [[COPY1]], -1, 5 /* e32 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m8 = COPY [[PseudoVMV_V_X_M8_]]
; CHECK-NEXT: PseudoRET implicit $v8m8
@@ -529,6 +556,7 @@ body: |
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[BuildPairF64Pseudo:%[0-9]+]]:fpr64 = BuildPairF64Pseudo [[COPY1]], [[COPY]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVFMV_V_FPR64_M1_:%[0-9]+]]:vr = PseudoVFMV_V_FPR64_M1 [[DEF]], [[BuildPairF64Pseudo]], -1, 6 /* e64 */, 0 /* tu, mu */
; CHECK-NEXT: $v8 = COPY [[PseudoVFMV_V_FPR64_M1_]]
; CHECK-NEXT: PseudoRET implicit $v8
@@ -549,6 +577,7 @@ body: |
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[BuildPairF64Pseudo:%[0-9]+]]:fpr64 = BuildPairF64Pseudo [[COPY1]], [[COPY]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVFMV_V_FPR64_M2_:%[0-9]+]]:vrm2 = PseudoVFMV_V_FPR64_M2 [[DEF]], [[BuildPairF64Pseudo]], -1, 6 /* e64 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m2 = COPY [[PseudoVFMV_V_FPR64_M2_]]
; CHECK-NEXT: PseudoRET implicit $v8m2
@@ -569,6 +598,7 @@ body: |
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[BuildPairF64Pseudo:%[0-9]+]]:fpr64 = BuildPairF64Pseudo [[COPY1]], [[COPY]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVFMV_V_FPR64_M4_:%[0-9]+]]:vrm4 = PseudoVFMV_V_FPR64_M4 [[DEF]], [[BuildPairF64Pseudo]], -1, 6 /* e64 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m4 = COPY [[PseudoVFMV_V_FPR64_M4_]]
; CHECK-NEXT: PseudoRET implicit $v8m4
@@ -589,6 +619,7 @@ body: |
; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x0
; CHECK-NEXT: [[BuildPairF64Pseudo:%[0-9]+]]:fpr64 = BuildPairF64Pseudo [[COPY1]], [[COPY]]
; CHECK-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[ADDI:%[0-9]+]]:gpr = ADDI $x0, -1
; CHECK-NEXT: [[PseudoVFMV_V_FPR64_M8_:%[0-9]+]]:vrm8 = PseudoVFMV_V_FPR64_M8 [[DEF]], [[BuildPairF64Pseudo]], -1, 6 /* e64 */, 0 /* tu, mu */
; CHECK-NEXT: $v8m8 = COPY [[PseudoVFMV_V_FPR64_M8_]]
; CHECK-NEXT: PseudoRET implicit $v8m8