@@ -265,13 +265,21 @@ RISCVLegalizerInfo::RISCVLegalizerInfo(const RISCVSubtarget &ST)
                                      {s32, p0, s16, 16},
                                      {s32, p0, s32, 32},
                                      {p0, p0, sXLen, XLen},
-                                     {nxv1s8, p0, nxv1s8, 8},
                                      {nxv2s8, p0, nxv2s8, 8},
                                      {nxv4s8, p0, nxv4s8, 8},
                                      {nxv8s8, p0, nxv8s8, 8},
-                                     {nxv16s8, p0, nxv16s8, 8}})
-          .widenScalarToNextPow2(0, /*MinSize=*/8)
-          .lowerIfMemSizeNotByteSizePow2();
+                                     {nxv16s8, p0, nxv16s8, 8},
+                                     {nxv32s8, p0, nxv32s8, 8},
+                                     {nxv64s8, p0, nxv64s8, 8},
+                                     {nxv2s16, p0, nxv2s16, 16},
+                                     {nxv4s16, p0, nxv4s16, 16},
+                                     {nxv8s16, p0, nxv8s16, 16},
+                                     {nxv16s16, p0, nxv16s16, 16},
+                                     {nxv32s16, p0, nxv32s16, 16},
+                                     {nxv2s32, p0, nxv2s32, 32},
+                                     {nxv4s32, p0, nxv4s32, 32},
+                                     {nxv8s32, p0, nxv8s32, 32},
+                                     {nxv16s32, p0, nxv16s32, 32}});

   auto &ExtLoadActions =
       getActionDefinitionsBuilder({G_SEXTLOAD, G_ZEXTLOAD})
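Not part of the patch, but as a reading aid: the nxv<N>s<W> names in the table above are LLT values for scalable vectors with a minimum of N elements of W bits each, presumably bound earlier in the constructor via LLT::scalable_vector. The sketch below is illustrative only; the function name and include path are assumptions, not taken from this file. Each four-element row passed to legalForTypesWithMemDesc pairs the register type, the pointer type, the in-memory type, and the minimum alignment in bits that the rule accepts.

#include "llvm/CodeGenTypes/LowLevelType.h" // include path assumed

using llvm::LLT;

// Illustrative only: how scalable-vector LLTs like those above are formed.
void scalableVectorLLTSketch() {
  const LLT s8 = LLT::scalar(8);
  const LLT nxv2s8 = LLT::scalable_vector(2, s8);    // <vscale x 2 x s8>
  const LLT nxv16s16 = LLT::scalable_vector(16, 16); // <vscale x 16 x s16>
  const LLT nxv8s64 = LLT::scalable_vector(8, 64);   // <vscale x 8 x s64>
  (void)nxv2s8;
  (void)nxv16s16;
  (void)nxv8s64;
}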
@@ -286,6 +294,18 @@ RISCVLegalizerInfo::RISCVLegalizerInfo(const RISCVSubtarget &ST)
   } else if (ST.hasStdExtD()) {
     LoadStoreActions.legalForTypesWithMemDesc({{s64, p0, s64, 64}});
   }
+  if (ST.getELen() == 64)
+    LoadStoreActions.legalForTypesWithMemDesc({{nxv1s8, p0, nxv1s8, 8},
+                                               {nxv1s16, p0, nxv1s16, 16},
+                                               {nxv1s32, p0, nxv1s32, 32}});
+  if (ST.hasVInstructionsI64())
+    LoadStoreActions.legalForTypesWithMemDesc({{nxv1s64, p0, nxv1s64, 64},
+                                               {nxv2s64, p0, nxv2s64, 64},
+                                               {nxv4s64, p0, nxv4s64, 64},
+                                               {nxv8s64, p0, nxv8s64, 64}});
+  LoadStoreActions.widenScalarToNextPow2(0, /*MinSize=*/8)
+      .lowerIfMemSizeNotByteSizePow2();
+
   LoadStoreActions.clampScalar(0, s32, sXLen).lower();
   ExtLoadActions.widenScalarToNextPow2(0).clampScalar(0, s32, sXLen).lower();
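The net effect of this hunk: the nxv1 (single minimum element) 8/16/32-bit types are legal only when ST.getELen() == 64, the 64-bit element types only when ST.hasVInstructionsI64(), and the scalar widening/lowering steps that the first hunk removed from the builder chain now run on LoadStoreActions after these additions. A standalone restatement of the resulting load/store legality rule, using purely illustrative names rather than LLVM API, is sketched below.

#include <cstdio>

// Illustrative restatement of the legality table added by this patch;
// names and structure here are not LLVM API.
static bool isLegalScalableLoadStore(unsigned MinElts, unsigned EltBits,
                                     unsigned ELen, bool HasVInstructionsI64) {
  // Power-of-two element counts within [Lo, Hi].
  auto inRange = [](unsigned V, unsigned Lo, unsigned Hi) {
    return V >= Lo && V <= Hi && (V & (V - 1)) == 0;
  };
  // Base table from the first hunk: nxv2s8..nxv64s8, nxv2s16..nxv32s16,
  // nxv2s32..nxv16s32.
  if (EltBits == 8 && inRange(MinElts, 2, 64))
    return true;
  if (EltBits == 16 && inRange(MinElts, 2, 32))
    return true;
  if (EltBits == 32 && inRange(MinElts, 2, 16))
    return true;
  // nxv1 types for 8/16/32-bit elements are gated on ELEN == 64.
  if (ELen == 64 && MinElts == 1 &&
      (EltBits == 8 || EltBits == 16 || EltBits == 32))
    return true;
  // 64-bit element types are gated on hasVInstructionsI64().
  if (HasVInstructionsI64 && EltBits == 64 && inRange(MinElts, 1, 8))
    return true;
  return false;
}

int main() {
  std::printf("nxv1s8,  ELEN=32: %d\n", isLegalScalableLoadStore(1, 8, 32, false));  // 0
  std::printf("nxv1s8,  ELEN=64: %d\n", isLegalScalableLoadStore(1, 8, 64, false));  // 1
  std::printf("nxv4s64, I64 vectors: %d\n", isLegalScalableLoadStore(4, 64, 64, true)); // 1
  return 0;
}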