@@ -521,13 +521,6 @@ bool llvm::CC_RISCV_FastCC(unsigned ValNo, MVT ValVT, MVT LocVT,
   const RISCVTargetLowering &TLI = *Subtarget.getTargetLowering();
   RISCVABI::ABI ABI = Subtarget.getTargetABI();
 
-  if (LocVT == MVT::i32 || LocVT == MVT::i64) {
-    if (MCRegister Reg = State.AllocateReg(getFastCCArgGPRs(ABI))) {
-      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
-      return false;
-    }
-  }
-
   if ((LocVT == MVT::f16 && Subtarget.hasStdExtZfhmin()) ||
       (LocVT == MVT::bf16 && Subtarget.hasStdExtZfbfmin())) {
     static const MCPhysReg FPR16List[] = {
@@ -565,14 +558,16 @@ bool llvm::CC_RISCV_FastCC(unsigned ValNo, MVT ValVT, MVT LocVT,
     }
   }
 
+  MVT XLenVT = Subtarget.getXLenVT();
+
   // Check if there is an available GPR before hitting the stack.
   if ((LocVT == MVT::f16 && Subtarget.hasStdExtZhinxmin()) ||
       (LocVT == MVT::f32 && Subtarget.hasStdExtZfinx()) ||
       (LocVT == MVT::f64 && Subtarget.is64Bit() &&
        Subtarget.hasStdExtZdinx())) {
     if (MCRegister Reg = State.AllocateReg(getFastCCArgGPRs(ABI))) {
       if (LocVT.getSizeInBits() != Subtarget.getXLen()) {
-        LocVT = Subtarget.getXLenVT();
+        LocVT = XLenVT;
         State.addLoc(
             CCValAssign::getCustomReg(ValNo, ValVT, Reg, LocVT, LocInfo));
         return false;
@@ -582,58 +577,39 @@ bool llvm::CC_RISCV_FastCC(unsigned ValNo, MVT ValVT, MVT LocVT,
     }
   }
 
-  if (LocVT == MVT::f16 || LocVT == MVT::bf16) {
-    int64_t Offset2 = State.AllocateStack(2, Align(2));
-    State.addLoc(CCValAssign::getMem(ValNo, ValVT, Offset2, LocVT, LocInfo));
-    return false;
-  }
-
-  if (LocVT == MVT::i32 || LocVT == MVT::f32) {
-    int64_t Offset4 = State.AllocateStack(4, Align(4));
-    State.addLoc(CCValAssign::getMem(ValNo, ValVT, Offset4, LocVT, LocInfo));
-    return false;
-  }
-
-  if (LocVT == MVT::i64 || LocVT == MVT::f64) {
-    int64_t Offset5 = State.AllocateStack(8, Align(8));
-    State.addLoc(CCValAssign::getMem(ValNo, ValVT, Offset5, LocVT, LocInfo));
-    return false;
-  }
+  ArrayRef<MCPhysReg> ArgGPRs = getFastCCArgGPRs(ABI);
 
   if (LocVT.isVector()) {
     if (MCRegister Reg = allocateRVVReg(ValVT, ValNo, State, TLI)) {
       // Fixed-length vectors are located in the corresponding scalable-vector
       // container types.
-      if (ValVT.isFixedLengthVector())
+      if (LocVT.isFixedLengthVector())
         LocVT = TLI.getContainerForFixedLengthVector(LocVT);
       State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
       return false;
     }
 
-    // Try and pass the address via a "fast" GPR.
-    if (MCRegister GPRReg = State.AllocateReg(getFastCCArgGPRs(ABI))) {
+    // Pass scalable vectors indirectly. Pass fixed vectors indirectly if we
+    // have a free GPR.
+    if (LocVT.isScalableVector() ||
+        State.getFirstUnallocated(ArgGPRs) != ArgGPRs.size()) {
       LocInfo = CCValAssign::Indirect;
-      LocVT = Subtarget.getXLenVT();
-      State.addLoc(CCValAssign::getReg(ValNo, ValVT, GPRReg, LocVT, LocInfo));
-      return false;
+      LocVT = XLenVT;
     }
+  }
 
-    // Pass scalable vectors indirectly by storing the pointer on the stack.
-    if (ValVT.isScalableVector()) {
-      LocInfo = CCValAssign::Indirect;
-      LocVT = Subtarget.getXLenVT();
-      unsigned XLen = Subtarget.getXLen();
-      int64_t StackOffset = State.AllocateStack(XLen / 8, Align(XLen / 8));
-      State.addLoc(
-          CCValAssign::getMem(ValNo, ValVT, StackOffset, LocVT, LocInfo));
+  if (LocVT == XLenVT) {
+    if (MCRegister Reg = State.AllocateReg(getFastCCArgGPRs(ABI))) {
+      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
       return false;
     }
+  }
 
-    // Pass fixed-length vectors on the stack.
-    auto StackAlign = MaybeAlign(ValVT.getScalarSizeInBits() / 8).valueOrOne();
-    int64_t StackOffset = State.AllocateStack(ValVT.getStoreSize(), StackAlign);
-    State.addLoc(
-        CCValAssign::getMem(ValNo, ValVT, StackOffset, LocVT, LocInfo));
+  if (LocVT == XLenVT || LocVT == MVT::f16 || LocVT == MVT::bf16 ||
+      LocVT == MVT::f32 || LocVT == MVT::f64 || LocVT.isFixedLengthVector()) {
+    Align StackAlign = MaybeAlign(ValVT.getScalarSizeInBits() / 8).valueOrOne();
+    int64_t Offset = State.AllocateStack(LocVT.getStoreSize(), StackAlign);
+    State.addLoc(CCValAssign::getMem(ValNo, ValVT, Offset, LocVT, LocInfo));
     return false;
   }
 
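Note: the three removed scalar stack blocks (f16/bf16 at Align(2), i32/f32 at Align(4), i64/f64 at Align(8)) and the old fixed-vector tail all collapse into the final unified block, which sizes the slot from LocVT.getStoreSize() and aligns it to the scalar element width. A minimal standalone sketch of that sizing rule, using plain C++ stand-ins rather than LLVM's MVT/Align types (TypeInfo and its fields are illustrative, not LLVM API):

#include <algorithm>
#include <cstdio>

// Hypothetical stand-in for MVT: just the two queries the rule needs.
struct TypeInfo {
  const char *Name;
  unsigned ScalarSizeInBits; // element width in bits
  unsigned StoreSizeInBytes; // in-memory size in bytes
};

int main() {
  const TypeInfo Types[] = {
      {"f16", 16, 2},    // old block: AllocateStack(2, Align(2))
      {"f32", 32, 4},    // old block: AllocateStack(4, Align(4))
      {"i64", 64, 8},    // old block: AllocateStack(8, Align(8))
      {"v8i32", 32, 32}, // old fixed-vector tail: element-aligned store size
  };
  for (const TypeInfo &T : Types) {
    // Mirrors MaybeAlign(ValVT.getScalarSizeInBits() / 8).valueOrOne():
    // a zero byte count (sub-byte elements) clamps to alignment 1.
    unsigned StackAlign = std::max(T.ScalarSizeInBits / 8, 1u);
    std::printf("%-6s -> size %2u, align %u\n", T.Name, T.StoreSizeInBytes,
                StackAlign);
  }
  return 0;
}

For the scalar types this reproduces the exact size/alignment pairs the removed blocks hard-coded, which is why they could be folded into one branch.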
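The vector path also no longer allocates a GPR itself: it only rewrites LocVT to XLenVT with LocInfo = Indirect, and the shared XLenVT register/stack code below performs the actual allocation. A hedged sketch of the resulting decision order for vector arguments, with invented names (VecArg, classify, the Loc enum) that are not LLVM API:

#include <cstdio>

// Invented model of the control flow, not RISCVCallingConv.cpp itself.
enum class Loc { RVVReg, IndirectGPR, IndirectStack, ByValueStack };

struct VecArg {
  bool Scalable;   // scalable RVV vector vs. fixed-length vector
  bool GotRVVReg;  // allocateRVVReg(...) succeeded
  bool HasFreeGPR; // State.getFirstUnallocated(ArgGPRs) != ArgGPRs.size()
};

Loc classify(const VecArg &A) {
  if (A.GotRVVReg) // RVV register first, as before
    return Loc::RVVReg;
  if (A.Scalable)  // scalable vectors are always indirect...
    return A.HasFreeGPR ? Loc::IndirectGPR : Loc::IndirectStack;
  // ...while fixed vectors go indirect only when a GPR is free, and
  // otherwise fall through to the unified stack block, passed by value.
  return A.HasFreeGPR ? Loc::IndirectGPR : Loc::ByValueStack;
}

int main() {
  std::printf("%d\n", static_cast<int>(classify({true, false, false})));  // 2: IndirectStack
  std::printf("%d\n", static_cast<int>(classify({false, false, true})));  // 1: IndirectGPR
  std::printf("%d\n", static_cast<int>(classify({false, false, false}))); // 3: ByValueStack
}

This is also where the getFirstUnallocated check earns its keep: the old code consumed a GPR just to discover it had one, whereas the new code peeks first and leaves LocVT as the fixed vector type when no GPR remains.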