@@ -1464,9 +1464,15 @@ bool RISCVTargetLowering::getTgtMemIntrinsic(IntrinsicInfo &Info,
1464
1464
auto &DL = I.getModule()->getDataLayout();
1465
1465
1466
1466
auto SetRVVLoadStoreInfo = [&](unsigned PtrOp, bool IsStore,
1467
- bool IsUnitStrided) {
1467
+ bool IsUnitStrided, bool UsePtrVal = false) {
1468
1468
Info.opc = IsStore ? ISD::INTRINSIC_VOID : ISD::INTRINSIC_W_CHAIN;
1469
- Info.ptrVal = I.getArgOperand(PtrOp);
1469
+ // We can't use ptrVal if the intrinsic can access memory before the
1470
+ // pointer. This means we can't use it for strided or indexed intrinsics.
1471
+ if (UsePtrVal)
1472
+ Info.ptrVal = I.getArgOperand(PtrOp);
1473
+ else
1474
+ Info.fallbackAddressSpace =
1475
+ I.getArgOperand(PtrOp)->getType()->getPointerAddressSpace();
1470
1476
Type *MemTy;
1471
1477
if (IsStore) {
1472
1478
// Store value is the first operand.
@@ -1526,7 +1532,7 @@ bool RISCVTargetLowering::getTgtMemIntrinsic(IntrinsicInfo &Info,
1526
1532
case Intrinsic::riscv_seg7_load:
1527
1533
case Intrinsic::riscv_seg8_load:
1528
1534
return SetRVVLoadStoreInfo(/*PtrOp*/ 0, /*IsStore*/ false,
1529
- /*IsUnitStrided*/ false);
1535
+ /*IsUnitStrided*/ false, /*UsePtrVal*/ true);
1530
1536
case Intrinsic::riscv_seg2_store:
1531
1537
case Intrinsic::riscv_seg3_store:
1532
1538
case Intrinsic::riscv_seg4_store:
@@ -1537,19 +1543,21 @@ bool RISCVTargetLowering::getTgtMemIntrinsic(IntrinsicInfo &Info,
1537
1543
// Operands are (vec, ..., vec, ptr, vl)
1538
1544
return SetRVVLoadStoreInfo(/*PtrOp*/ I.arg_size() - 2,
1539
1545
/*IsStore*/ true,
1540
- /*IsUnitStrided*/ false);
1546
+ /*IsUnitStrided*/ false, /*UsePtrVal*/ true);
1541
1547
case Intrinsic::riscv_vle:
1542
1548
case Intrinsic::riscv_vle_mask:
1543
1549
case Intrinsic::riscv_vleff:
1544
1550
case Intrinsic::riscv_vleff_mask:
1545
1551
return SetRVVLoadStoreInfo(/*PtrOp*/ 1,
1546
1552
/*IsStore*/ false,
1547
- /*IsUnitStrided*/ true);
1553
+ /*IsUnitStrided*/ true,
1554
+ /*UsePtrVal*/ true);
1548
1555
case Intrinsic::riscv_vse:
1549
1556
case Intrinsic::riscv_vse_mask:
1550
1557
return SetRVVLoadStoreInfo(/*PtrOp*/ 1,
1551
1558
/*IsStore*/ true,
1552
- /*IsUnitStrided*/ true);
1559
+ /*IsUnitStrided*/ true,
1560
+ /*UsePtrVal*/ true);
1553
1561
case Intrinsic::riscv_vlse:
1554
1562
case Intrinsic::riscv_vlse_mask:
1555
1563
case Intrinsic::riscv_vloxei:
@@ -1584,7 +1592,7 @@ bool RISCVTargetLowering::getTgtMemIntrinsic(IntrinsicInfo &Info,
1584
1592
case Intrinsic::riscv_vlseg8ff:
1585
1593
return SetRVVLoadStoreInfo(/*PtrOp*/ I.arg_size() - 2,
1586
1594
/*IsStore*/ false,
1587
- /*IsUnitStrided*/ false);
1595
+ /*IsUnitStrided*/ false, /*UsePtrVal*/ true);
1588
1596
case Intrinsic::riscv_vlseg2_mask:
1589
1597
case Intrinsic::riscv_vlseg3_mask:
1590
1598
case Intrinsic::riscv_vlseg4_mask:
@@ -1601,7 +1609,7 @@ bool RISCVTargetLowering::getTgtMemIntrinsic(IntrinsicInfo &Info,
1601
1609
case Intrinsic::riscv_vlseg8ff_mask:
1602
1610
return SetRVVLoadStoreInfo(/*PtrOp*/ I.arg_size() - 4,
1603
1611
/*IsStore*/ false,
1604
- /*IsUnitStrided*/ false);
1612
+ /*IsUnitStrided*/ false, /*UsePtrVal*/ true);
1605
1613
case Intrinsic::riscv_vlsseg2:
1606
1614
case Intrinsic::riscv_vlsseg3:
1607
1615
case Intrinsic::riscv_vlsseg4:
0 commit comments