@@ -312,3 +312,50 @@ define void @vmv.v.x_live(ptr %p, i64 %x) {
   store volatile i64 %x, ptr %p
   ret void
 }
+
+define void @vfmv.v.f(ptr %p, double %x) {
+; CHECK-LABEL: vfmv.v.f:
+; CHECK: # %bb.0:
+; CHECK-NEXT: addi sp, sp, -16
+; CHECK-NEXT: .cfi_def_cfa_offset 16
+; CHECK-NEXT: csrr a1, vlenb
+; CHECK-NEXT: slli a1, a1, 3
+; CHECK-NEXT: sub sp, sp, a1
+; CHECK-NEXT: .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x08, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 8 * vlenb
+; CHECK-NEXT: vsetvli a1, zero, e64, m8, ta, ma
+; CHECK-NEXT: vfmv.v.f v8, fa0
+; CHECK-NEXT: vs8r.v v8, (a0)
+; CHECK-NEXT: vl8re64.v v16, (a0)
+; CHECK-NEXT: addi a1, sp, 16
+; CHECK-NEXT: vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-NEXT: vl8re64.v v24, (a0)
+; CHECK-NEXT: vl8re64.v v0, (a0)
+; CHECK-NEXT: vl8re64.v v16, (a0)
+; CHECK-NEXT: vs8r.v v16, (a0)
+; CHECK-NEXT: vs8r.v v0, (a0)
+; CHECK-NEXT: vs8r.v v24, (a0)
+; CHECK-NEXT: vl8r.v v16, (a1) # Unknown-size Folded Reload
+; CHECK-NEXT: vs8r.v v16, (a0)
+; CHECK-NEXT: vs8r.v v8, (a0)
+; CHECK-NEXT: fsd fa0, 0(a0)
+; CHECK-NEXT: csrr a0, vlenb
+; CHECK-NEXT: slli a0, a0, 3
+; CHECK-NEXT: add sp, sp, a0
+; CHECK-NEXT: addi sp, sp, 16
+; CHECK-NEXT: ret
+  %vfmv.v.f = call <vscale x 8 x double> @llvm.riscv.vfmv.v.f.nxv8f64(<vscale x 8 x double> poison, double %x, i64 -1)
+  store volatile <vscale x 8 x double> %vfmv.v.f, ptr %p
+
+  %a = load volatile <vscale x 8 x double>, ptr %p
+  %b = load volatile <vscale x 8 x double>, ptr %p
+  %c = load volatile <vscale x 8 x double>, ptr %p
+  %d = load volatile <vscale x 8 x double>, ptr %p
+  store volatile <vscale x 8 x double> %d, ptr %p
+  store volatile <vscale x 8 x double> %c, ptr %p
+  store volatile <vscale x 8 x double> %b, ptr %p
+  store volatile <vscale x 8 x double> %a, ptr %p
+
+  store volatile <vscale x 8 x double> %vfmv.v.f, ptr %p
+  store volatile double %x, ptr %p
+  ret void
+}