; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 2
; RUN: llc -mtriple=riscv64 -mattr=+v,+zvl512b < %s | FileCheck %s

; A single source shuffle with an offset not representable in an i8
; vector, and a type which can't be promoted to i16 element type while
; remaining a valid type. Note that splitting the vector is legal here
define <512 x i8> @single_source(<512 x i8> %a) {
; CHECK-LABEL: single_source:
; CHECK:       # %bb.0:
; CHECK-NEXT:    li a0, 512
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
; CHECK-NEXT:    vmv.v.i v16, 0
; CHECK-NEXT:    li a1, 258
; CHECK-NEXT:    vslide1down.vx v24, v16, a1
; CHECK-NEXT:    vsetvli zero, a0, e8, m1, ta, ma
; CHECK-NEXT:    vmv.v.i v16, 5
; CHECK-NEXT:    li a1, 432
; CHECK-NEXT:    li a2, 431
; CHECK-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v24, v16, a2
; CHECK-NEXT:    vsetvli zero, a0, e8, m1, ta, ma
; CHECK-NEXT:    vmv.v.i v16, 4
; CHECK-NEXT:    li a1, 466
; CHECK-NEXT:    li a2, 465
; CHECK-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v24, v16, a2
; CHECK-NEXT:    li a1, 500
; CHECK-NEXT:    vmv.s.x v16, a1
; CHECK-NEXT:    li a2, 501
; CHECK-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v24, v16, a1
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
; CHECK-NEXT:    vrgather.vv v16, v8, v24
; CHECK-NEXT:    vmv.v.v v8, v16
; CHECK-NEXT:    ret
  ; Mask written 16 elements per row. Nonzero entries: index 431 -> 5,
  ; index 465 -> 4, index 500 -> 500, index 511 -> 258; all others 0.
  %res = shufflevector <512 x i8> %a, <512 x i8> poison, <512 x i32> <
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 0-15
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 16-31
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 32-47
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 48-63
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 64-79
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 80-95
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 96-111
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 112-127
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 128-143
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 144-159
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 160-175
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 176-191
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 192-207
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 208-223
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 224-239
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 240-255
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 256-271
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 272-287
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 288-303
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 304-319
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 320-335
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 336-351
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 352-367
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 368-383
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 384-399
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 400-415
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 5, ; 416-431
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 432-447
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 448-463
    i32 0, i32 4, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 464-479
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 480-495
    i32 0, i32 0, i32 0, i32 0, i32 500, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 258> ; 496-511
  ret <512 x i8> %res
}

; Like the above, but the actual values of the index are all representable
define <512 x i8> @range_restriction(<512 x i8> %a) {
; CHECK-LABEL: range_restriction:
; CHECK:       # %bb.0:
; CHECK-NEXT:    li a0, 512
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
; CHECK-NEXT:    vmv.v.i v16, 0
; CHECK-NEXT:    li a1, 254
; CHECK-NEXT:    vslide1down.vx v24, v16, a1
; CHECK-NEXT:    vsetvli zero, a0, e8, m1, ta, ma
; CHECK-NEXT:    vmv.v.i v16, 5
; CHECK-NEXT:    li a1, 432
; CHECK-NEXT:    li a2, 431
; CHECK-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v24, v16, a2
; CHECK-NEXT:    vsetvli zero, a0, e8, m1, ta, ma
; CHECK-NEXT:    vmv.v.i v16, 4
; CHECK-NEXT:    li a1, 466
; CHECK-NEXT:    li a2, 465
; CHECK-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v24, v16, a2
; CHECK-NEXT:    li a1, 44
; CHECK-NEXT:    vmv.s.x v16, a1
; CHECK-NEXT:    li a1, 501
; CHECK-NEXT:    li a2, 500
; CHECK-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v24, v16, a2
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
; CHECK-NEXT:    vrgather.vv v16, v8, v24
; CHECK-NEXT:    vmv.v.v v8, v16
; CHECK-NEXT:    ret
  ; Mask written 16 elements per row. Nonzero entries: index 431 -> 5,
  ; index 465 -> 4, index 500 -> 44, index 511 -> 254; all others 0.
  %res = shufflevector <512 x i8> %a, <512 x i8> poison, <512 x i32> <
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 0-15
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 16-31
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 32-47
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 48-63
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 64-79
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 80-95
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 96-111
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 112-127
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 128-143
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 144-159
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 160-175
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 176-191
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 192-207
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 208-223
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 224-239
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 240-255
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 256-271
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 272-287
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 288-303
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 304-319
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 320-335
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 336-351
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 352-367
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 368-383
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 384-399
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 400-415
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 5, ; 416-431
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 432-447
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 448-463
    i32 0, i32 4, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 464-479
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 480-495
    i32 0, i32 0, i32 0, i32 0, i32 44, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 254> ; 496-511
  ret <512 x i8> %res
}


define <512 x i8> @two_source(<512 x i8> %a, <512 x i8> %b) {
; CHECK-LABEL: two_source:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addi sp, sp, -2032
; CHECK-NEXT:    .cfi_def_cfa_offset 2032
; CHECK-NEXT:    sd ra, 2024(sp) # 8-byte Folded Spill
; CHECK-NEXT:    sd s0, 2016(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    .cfi_offset s0, -16
; CHECK-NEXT:    addi s0, sp, 2032
; CHECK-NEXT:    .cfi_def_cfa s0, 0
; CHECK-NEXT:    addi sp, sp, -16
; CHECK-NEXT:    andi sp, sp, -512
; CHECK-NEXT:    vmv8r.v v24, v8
; CHECK-NEXT:    li a0, 512
; CHECK-NEXT:    addi a1, sp, 1024
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
; CHECK-NEXT:    vse8.v v8, (a1)
; CHECK-NEXT:    addi a1, sp, 512
; CHECK-NEXT:    vse8.v v16, (a1)
; CHECK-NEXT:    vmv.x.s a1, v24
; CHECK-NEXT:    vmv.v.x v8, a1
; CHECK-NEXT:    li a1, 43
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vx v17, v16, a1
; CHECK-NEXT:    vmv.x.s a1, v17
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
; CHECK-NEXT:    vslide1down.vx v8, v8, a1
; CHECK-NEXT:    li a0, 36
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vx v17, v16, a0
; CHECK-NEXT:    vmv.x.s a0, v17
; CHECK-NEXT:    vmv.s.x v0, a0
; CHECK-NEXT:    li a0, 399
; CHECK-NEXT:    li a1, 398
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v8, v0, a1
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v17, v24, 5
; CHECK-NEXT:    vmv.x.s a0, v17
; CHECK-NEXT:    vmv.s.x v0, a0
; CHECK-NEXT:    li a0, 432
; CHECK-NEXT:    li a1, 431
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v8, v0, a1
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v17, v24, 4
; CHECK-NEXT:    vmv.x.s a0, v17
; CHECK-NEXT:    vmv.s.x v24, a0
; CHECK-NEXT:    li a0, 466
; CHECK-NEXT:    li a1, 465
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v8, v24, a1
; CHECK-NEXT:    li a1, 62
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vx v16, v16, a1
; CHECK-NEXT:    vmv.x.s a1, v16
; CHECK-NEXT:    vmv.s.x v16, a1
; CHECK-NEXT:    li a1, 467
; CHECK-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
; CHECK-NEXT:    lbu a1, 1497(sp)
; CHECK-NEXT:    vslideup.vx v8, v16, a0
; CHECK-NEXT:    vmv.s.x v16, a1
; CHECK-NEXT:    li a0, 478
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, tu, ma
; CHECK-NEXT:    lbu a0, 674(sp)
; CHECK-NEXT:    li a1, 477
; CHECK-NEXT:    vslideup.vx v8, v16, a1
; CHECK-NEXT:    vmv.s.x v16, a0
; CHECK-NEXT:    li a0, 490
; CHECK-NEXT:    li a1, 489
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, tu, ma
; CHECK-NEXT:    lbu a0, 1524(sp)
; CHECK-NEXT:    vslideup.vx v8, v16, a1
; CHECK-NEXT:    vmv.s.x v16, a0
; CHECK-NEXT:    li a0, 501
; CHECK-NEXT:    li a1, 500
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v8, v16, a1
; CHECK-NEXT:    addi sp, s0, -2048
; CHECK-NEXT:    addi sp, sp, 16
; CHECK-NEXT:    ld ra, 2024(sp) # 8-byte Folded Reload
; CHECK-NEXT:    ld s0, 2016(sp) # 8-byte Folded Reload
; CHECK-NEXT:    addi sp, sp, 2032
; CHECK-NEXT:    ret
  ; Mask written 16 elements per row. Indices >= 512 select from %b.
  ; Nonzero entries: index 398 -> 548 (b[36]), 431 -> 5, 465 -> 4,
  ; 466 -> 574 (b[62]), 477 -> 473, 489 -> 674 (b[162]), 500 -> 500,
  ; 511 -> 555 (b[43]); all others 0.
  %res = shufflevector <512 x i8> %a, <512 x i8> %b, <512 x i32> <
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 0-15
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 16-31
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 32-47
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 48-63
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 64-79
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 80-95
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 96-111
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 112-127
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 128-143
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 144-159
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 160-175
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 176-191
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 192-207
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 208-223
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 224-239
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 240-255
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 256-271
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 272-287
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 288-303
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 304-319
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 320-335
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 336-351
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 352-367
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 368-383
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 548, i32 0, ; 384-399
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 400-415
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 5, ; 416-431
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 432-447
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 448-463
    i32 0, i32 4, i32 574, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 473, i32 0, i32 0, ; 464-479
    i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 674, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, ; 480-495
    i32 0, i32 0, i32 0, i32 0, i32 500, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 555> ; 496-511
  ret <512 x i8> %res
}