@@ -4497,7 +4497,8 @@ void AArch64InstrInfo::copyPhysReg(MachineBasicBlock &MBB,
   // Copy a Predicate register by ORRing with itself.
   if (AArch64::PPRRegClass.contains(DestReg) &&
       AArch64::PPRRegClass.contains(SrcReg)) {
-    assert(Subtarget.hasSVEorSME() && "Unexpected SVE register.");
+    assert(Subtarget.isSVEorStreamingSVEAvailable() &&
+           "Unexpected SVE register.");
     BuildMI(MBB, I, DL, get(AArch64::ORR_PPzPP), DestReg)
         .addReg(SrcReg) // Pg
         .addReg(SrcReg)
@@ -4510,8 +4511,6 @@ void AArch64InstrInfo::copyPhysReg(MachineBasicBlock &MBB,
   bool DestIsPNR = AArch64::PNRRegClass.contains(DestReg);
   bool SrcIsPNR = AArch64::PNRRegClass.contains(SrcReg);
   if (DestIsPNR || SrcIsPNR) {
-    assert((Subtarget.hasSVE2p1() || Subtarget.hasSME2()) &&
-           "Unexpected predicate-as-counter register.");
     auto ToPPR = [](MCRegister R) -> MCRegister {
       return (R - AArch64::PN0) + AArch64::P0;
     };
@@ -4532,7 +4531,8 @@ void AArch64InstrInfo::copyPhysReg(MachineBasicBlock &MBB,
   // Copy a Z register by ORRing with itself.
   if (AArch64::ZPRRegClass.contains(DestReg) &&
       AArch64::ZPRRegClass.contains(SrcReg)) {
-    assert(Subtarget.hasSVEorSME() && "Unexpected SVE register.");
+    assert(Subtarget.isSVEorStreamingSVEAvailable() &&
+           "Unexpected SVE register.");
     BuildMI(MBB, I, DL, get(AArch64::ORR_ZZZ), DestReg)
         .addReg(SrcReg)
         .addReg(SrcReg, getKillRegState(KillSrc));
@@ -4544,7 +4544,8 @@ void AArch64InstrInfo::copyPhysReg(MachineBasicBlock &MBB,
        AArch64::ZPR2StridedOrContiguousRegClass.contains(DestReg)) &&
       (AArch64::ZPR2RegClass.contains(SrcReg) ||
        AArch64::ZPR2StridedOrContiguousRegClass.contains(SrcReg))) {
-    assert(Subtarget.hasSVEorSME() && "Unexpected SVE register.");
+    assert(Subtarget.isSVEorStreamingSVEAvailable() &&
+           "Unexpected SVE register.");
     static const unsigned Indices[] = {AArch64::zsub0, AArch64::zsub1};
     copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORR_ZZZ,
                      Indices);
@@ -4554,7 +4555,8 @@ void AArch64InstrInfo::copyPhysReg(MachineBasicBlock &MBB,
   // Copy a Z register triple by copying the individual sub-registers.
   if (AArch64::ZPR3RegClass.contains(DestReg) &&
       AArch64::ZPR3RegClass.contains(SrcReg)) {
-    assert(Subtarget.hasSVEorSME() && "Unexpected SVE register.");
+    assert(Subtarget.isSVEorStreamingSVEAvailable() &&
+           "Unexpected SVE register.");
     static const unsigned Indices[] = {AArch64::zsub0, AArch64::zsub1,
                                        AArch64::zsub2};
     copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORR_ZZZ,
@@ -4567,7 +4569,8 @@ void AArch64InstrInfo::copyPhysReg(MachineBasicBlock &MBB,
        AArch64::ZPR4StridedOrContiguousRegClass.contains(DestReg)) &&
       (AArch64::ZPR4RegClass.contains(SrcReg) ||
        AArch64::ZPR4StridedOrContiguousRegClass.contains(SrcReg))) {
-    assert(Subtarget.hasSVEorSME() && "Unexpected SVE register.");
+    assert(Subtarget.isSVEorStreamingSVEAvailable() &&
+           "Unexpected SVE register.");
     static const unsigned Indices[] = {AArch64::zsub0, AArch64::zsub1,
                                        AArch64::zsub2, AArch64::zsub3};
     copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORR_ZZZ,
@@ -4830,14 +4833,12 @@ void AArch64InstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
       Opc = AArch64::STRBui;
     break;
   case 2: {
-    bool IsPNR = AArch64::PNRRegClass.hasSubClassEq(RC);
     if (AArch64::FPR16RegClass.hasSubClassEq(RC))
       Opc = AArch64::STRHui;
-    else if (IsPNR || AArch64::PPRRegClass.hasSubClassEq(RC)) {
-      assert(Subtarget.hasSVEorSME() &&
+    else if (AArch64::PNRRegClass.hasSubClassEq(RC) ||
+             AArch64::PPRRegClass.hasSubClassEq(RC)) {
+      assert(Subtarget.isSVEorStreamingSVEAvailable() &&
              "Unexpected register store without SVE store instructions");
-      assert((!IsPNR || Subtarget.hasSVE2p1() || Subtarget.hasSME2()) &&
-             "Unexpected register store without SVE2p1 or SME2");
       Opc = AArch64::STR_PXI;
       StackID = TargetStackID::ScalableVector;
     }
@@ -4886,7 +4887,7 @@ void AArch64InstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                               AArch64::sube64, AArch64::subo64, FI, MMO);
       return;
     } else if (AArch64::ZPRRegClass.hasSubClassEq(RC)) {
-      assert(Subtarget.hasSVEorSME() &&
+      assert(Subtarget.isSVEorStreamingSVEAvailable() &&
              "Unexpected register store without SVE store instructions");
       Opc = AArch64::STR_ZXI;
       StackID = TargetStackID::ScalableVector;
@@ -4910,7 +4911,7 @@ void AArch64InstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
       Offset = false;
     } else if (AArch64::ZPR2RegClass.hasSubClassEq(RC) ||
                AArch64::ZPR2StridedOrContiguousRegClass.hasSubClassEq(RC)) {
-      assert(Subtarget.hasSVEorSME() &&
+      assert(Subtarget.isSVEorStreamingSVEAvailable() &&
             "Unexpected register store without SVE store instructions");
       Opc = AArch64::STR_ZZXI;
       StackID = TargetStackID::ScalableVector;
@@ -4922,7 +4923,7 @@ void AArch64InstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
       Opc = AArch64::ST1Threev2d;
       Offset = false;
     } else if (AArch64::ZPR3RegClass.hasSubClassEq(RC)) {
-      assert(Subtarget.hasSVEorSME() &&
+      assert(Subtarget.isSVEorStreamingSVEAvailable() &&
             "Unexpected register store without SVE store instructions");
       Opc = AArch64::STR_ZZZXI;
       StackID = TargetStackID::ScalableVector;
@@ -4935,7 +4936,7 @@ void AArch64InstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
       Offset = false;
     } else if (AArch64::ZPR4RegClass.hasSubClassEq(RC) ||
                AArch64::ZPR4StridedOrContiguousRegClass.hasSubClassEq(RC)) {
-      assert(Subtarget.hasSVEorSME() &&
+      assert(Subtarget.isSVEorStreamingSVEAvailable() &&
             "Unexpected register store without SVE store instructions");
       Opc = AArch64::STR_ZZZZXI;
       StackID = TargetStackID::ScalableVector;
@@ -5008,10 +5009,8 @@ void AArch64InstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
     if (AArch64::FPR16RegClass.hasSubClassEq(RC))
       Opc = AArch64::LDRHui;
     else if (IsPNR || AArch64::PPRRegClass.hasSubClassEq(RC)) {
-      assert(Subtarget.hasSVEorSME() &&
+      assert(Subtarget.isSVEorStreamingSVEAvailable() &&
             "Unexpected register load without SVE load instructions");
-      assert((!IsPNR || Subtarget.hasSVE2p1() || Subtarget.hasSME2()) &&
-             "Unexpected register load without SVE2p1 or SME2");
       if (IsPNR)
         PNRReg = DestReg;
       Opc = AArch64::LDR_PXI;
@@ -5062,7 +5061,7 @@ void AArch64InstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
                                AArch64::subo64, FI, MMO);
       return;
     } else if (AArch64::ZPRRegClass.hasSubClassEq(RC)) {
-      assert(Subtarget.hasSVEorSME() &&
+      assert(Subtarget.isSVEorStreamingSVEAvailable() &&
             "Unexpected register load without SVE load instructions");
       Opc = AArch64::LDR_ZXI;
       StackID = TargetStackID::ScalableVector;
@@ -5086,7 +5085,7 @@ void AArch64InstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
       Offset = false;
     } else if (AArch64::ZPR2RegClass.hasSubClassEq(RC) ||
                AArch64::ZPR2StridedOrContiguousRegClass.hasSubClassEq(RC)) {
-      assert(Subtarget.hasSVEorSME() &&
+      assert(Subtarget.isSVEorStreamingSVEAvailable() &&
            "Unexpected register load without SVE load instructions");
       Opc = AArch64::LDR_ZZXI;
       StackID = TargetStackID::ScalableVector;
@@ -5098,7 +5097,7 @@ void AArch64InstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
       Opc = AArch64::LD1Threev2d;
       Offset = false;
     } else if (AArch64::ZPR3RegClass.hasSubClassEq(RC)) {
-      assert(Subtarget.hasSVEorSME() &&
+      assert(Subtarget.isSVEorStreamingSVEAvailable() &&
            "Unexpected register load without SVE load instructions");
       Opc = AArch64::LDR_ZZZXI;
       StackID = TargetStackID::ScalableVector;
@@ -5111,7 +5110,7 @@ void AArch64InstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
       Offset = false;
     } else if (AArch64::ZPR4RegClass.hasSubClassEq(RC) ||
                AArch64::ZPR4StridedOrContiguousRegClass.hasSubClassEq(RC)) {
-      assert(Subtarget.hasSVEorSME() &&
+      assert(Subtarget.isSVEorStreamingSVEAvailable() &&
            "Unexpected register load without SVE load instructions");
       Opc = AArch64::LDR_ZZZZXI;
       StackID = TargetStackID::ScalableVector;
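For context: every assert above now goes through Subtarget.isSVEorStreamingSVEAvailable() rather than the feature-level hasSVEorSME(), and the separate SVE2p1/SME2 asserts for predicate-as-counter (PNR) registers are dropped. The sketch below is a minimal illustration of the kind of check such a predicate performs; the struct and field names are assumptions for illustration, not the in-tree AArch64Subtarget implementation.

// Minimal sketch, assuming illustrative names; approximates the intent of an
// "SVE or streaming SVE available" query rather than copying LLVM's code.
#include <cassert>

struct SubtargetSketch {
  bool HasSVE = false;      // +sve: full SVE usable outside streaming mode.
  bool HasSME = false;      // +sme: streaming-compatible SVE usable when streaming.
  bool IsStreaming = false; // The current function executes in streaming mode.

  bool isSVEorStreamingSVEAvailable() const {
    // True if the full SVE instruction set is present, or if SME's
    // streaming-compatible SVE subset is usable because we are streaming.
    return HasSVE || (HasSME && IsStreaming);
  }
};

int main() {
  // A streaming-only target (SME without SVE) satisfies the relaxed assert,
  // which appears to be the point of the change: the predicate/vector copy
  // and spill/fill instructions used here are valid in streaming mode.
  SubtargetSketch StreamingOnly{/*HasSVE=*/false, /*HasSME=*/true,
                                /*IsStreaming=*/true};
  assert(StreamingOnly.isSVEorStreamingSVEAvailable());
  return 0;
}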