# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64 -run-pass=aarch64-prelegalizer-combiner -aarch64prelegalizercombinerhelper-only-enable-rule="load_and_mask" -verify-machineinstrs %s -o - | FileCheck %s

# REQUIRES: asserts

# Check that we can fold and ({any,zext,sext}load, mask) -> zextload
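#
# For example (an illustrative sketch with placeholder vregs, mirroring
# test_anyext_s32 below), a pattern such as
#   %mask:_(s32) = G_CONSTANT i32 255
#   %val:_(s32) = G_LOAD %ptr(p0) :: (load (s8))
#   %masked:_(s32) = G_AND %val, %mask
# is expected to combine into
#   %masked:_(s32) = G_ZEXTLOAD %ptr(p0) :: (load (s8))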

---
name: test_anyext_1
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
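
    ; A single-bit mask is narrower than a byte, so it can't be rewritten as a
    ; byte-sized zero-extending load; no fold is expected here.
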
    ; CHECK-LABEL: name: test_anyext_1
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s8) = G_CONSTANT i8 1
    ; CHECK: [[LOAD:%[0-9]+]]:_(s8) = G_LOAD [[COPY]](p0) :: (load (s8))
    ; CHECK: [[AND:%[0-9]+]]:_(s8) = G_AND [[LOAD]], [[C]]
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[AND]](s8)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s8) = G_CONSTANT i8 1
    %2:_(s8) = G_LOAD %0 :: (load (s8))
    %3:_(s8) = G_AND %2, %1
    %4:_(s32) = G_ANYEXT %3
    $w0 = COPY %4
...

---
name: test_anyext_s16
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
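
    ; The 0xff mask makes the anyextending s8 load zero-extending, so it is
    ; replaced by a G_ZEXTLOAD; only the final G_ANYEXT to s32 remains.
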
    ; CHECK-LABEL: name: test_anyext_s16
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s16) = G_ZEXTLOAD [[COPY]](p0) :: (load (s8))
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ZEXTLOAD]](s16)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s16) = G_CONSTANT i16 255
    %2:_(s16) = G_LOAD %0 :: (load (s8))
    %3:_(s16) = G_AND %2, %1
    %4:_(s32) = G_ANYEXT %3
    $w0 = COPY %4
...

---
name: test_anyext_s32
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
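
    ; Here the anyextending load produces the full s32 result directly, so the
    ; load becomes a G_ZEXTLOAD and the mask disappears entirely.
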
    ; CHECK-LABEL: name: test_anyext_s32
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s32) = G_ZEXTLOAD [[COPY]](p0) :: (load (s8))
    ; CHECK: $w0 = COPY [[ZEXTLOAD]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s32) = G_CONSTANT i32 255
    %2:_(s32) = G_LOAD %0 :: (load (s8))
    %3:_(s32) = G_AND %2, %1
    $w0 = COPY %3
...

---
name: test_load_s32
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
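
    ; The 0xff mask narrows a full 32-bit load down to a byte-sized
    ; zero-extending load, keeping the original alignment.
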
    ; CHECK-LABEL: name: test_load_s32
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s32) = G_ZEXTLOAD [[COPY]](p0) :: (load (s8), align 4)
    ; CHECK: $w0 = COPY [[ZEXTLOAD]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s32) = G_CONSTANT i32 255
    %2:_(s32) = G_LOAD %0 :: (load (s32))
    %3:_(s32) = G_AND %2, %1
    $w0 = COPY %3
...


---
name: test_load_mask_size_equals_dst_size
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0

    ; The combine should only apply if the mask zeroes actual bits of the dst type
    ; If it doesn't, the mask is redundant and we have other combines to fold it away

    ; CHECK-LABEL: name: test_load_mask_size_equals_dst_size
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p0) :: (load (s32))
    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[LOAD]], [[C]]
    ; CHECK: $w0 = COPY [[AND]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s32) = G_CONSTANT i32 4294967295
    %2:_(s32) = G_LOAD %0 :: (load (s32))
    %3:_(s32) = G_AND %2, %1
    $w0 = COPY %3
...

---
name: test_zext
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
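
    ; The mask is narrower than the zero-extended s16 load, so the load is
    ; shrunk to a byte-sized G_ZEXTLOAD, keeping the original alignment.
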
    ; CHECK-LABEL: name: test_zext
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s32) = G_ZEXTLOAD [[COPY]](p0) :: (load (s8), align 2)
    ; CHECK: $w0 = COPY [[ZEXTLOAD]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s32) = G_CONSTANT i32 255
    %2:_(s32) = G_ZEXTLOAD %0 :: (load (s16))
    %3:_(s32) = G_AND %2, %1
    $w0 = COPY %3
...

---
name: test_zext_mask_larger_memsize
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0

    ; The combine should only apply if the mask narrows the memory size.
    ; We have another combine that folds redundant masks

    ; CHECK-LABEL: name: test_zext_mask_larger_memsize
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s32) = G_ZEXTLOAD [[COPY]](p0) :: (load (s8))
    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[ZEXTLOAD]], [[C]]
    ; CHECK: $w0 = COPY [[AND]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s32) = G_CONSTANT i32 65535
    %2:_(s32) = G_ZEXTLOAD %0 :: (load (s8))
    %3:_(s32) = G_AND %2, %1
    $w0 = COPY %3
...

---
name: test_sext
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
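
    ; Masking with 0xff discards the sign bits, so the sign-extending s16 load
    ; becomes a byte-sized zero-extending load.
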
    ; CHECK-LABEL: name: test_sext
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s32) = G_ZEXTLOAD [[COPY]](p0) :: (load (s8), align 2)
    ; CHECK: $w0 = COPY [[ZEXTLOAD]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s32) = G_CONSTANT i32 255
    %2:_(s32) = G_SEXTLOAD %0 :: (load (s16))
    %3:_(s32) = G_AND %2, %1
    $w0 = COPY %3
...

---
name: test_sext_mask_larger_memsize
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
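
    ; As with the zext case above, the mask does not narrow the memory size,
    ; so no fold is expected.
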
    ; CHECK-LABEL: name: test_sext_mask_larger_memsize
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s32) = G_SEXTLOAD [[COPY]](p0) :: (load (s8))
    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[SEXTLOAD]], [[C]]
    ; CHECK: $w0 = COPY [[AND]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s32) = G_CONSTANT i32 65535
    %2:_(s32) = G_SEXTLOAD %0 :: (load (s8))
    %3:_(s32) = G_AND %2, %1
    $w0 = COPY %3
...

---
name: test_non_pow2_memtype
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
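
    ; Neither the non-power-of-2 s24 memory type nor the 3-bit mask forms a
    ; byte-sized power-of-two access, so no fold is expected.
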
    ; CHECK-LABEL: name: test_non_pow2_memtype
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s24) = G_CONSTANT i24 7
    ; CHECK: [[LOAD:%[0-9]+]]:_(s24) = G_LOAD [[COPY]](p0) :: (load (s24), align 4)
    ; CHECK: [[AND:%[0-9]+]]:_(s24) = G_AND [[LOAD]], [[C]]
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[AND]](s24)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s24) = G_CONSTANT i24 7
    %2:_(s24) = G_LOAD %0 :: (load (s24))
    %3:_(s24) = G_AND %2, %1
    %4:_(s32) = G_ANYEXT %3
    $w0 = COPY %4
...


---
name: test_no_mask
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
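
    ; 510 (0b111111110) is not a low-bits mask, so the G_AND is not a simple
    ; zero-extension and no fold is expected.
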
    ; CHECK-LABEL: name: test_no_mask
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 510
    ; CHECK: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p0) :: (load (s8))
    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[LOAD]], [[C]]
    ; CHECK: $w0 = COPY [[AND]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s32) = G_CONSTANT i32 510
    %2:_(s32) = G_LOAD %0 :: (load (s8))
    %3:_(s32) = G_AND %2, %1
    $w0 = COPY %3
...

---
name: test_volatile
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
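
    ; Volatile loads are left untouched; no fold is expected.
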
    ; CHECK-LABEL: name: test_volatile
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; CHECK: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p0) :: (volatile load (s8))
    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[LOAD]], [[C]]
    ; CHECK: $w0 = COPY [[AND]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s32) = G_CONSTANT i32 255
    %2:_(s32) = G_LOAD %0 :: (volatile load (s8))
    %3:_(s32) = G_AND %2, %1
    $w0 = COPY %3
...