Skip to content

Commit 859b785

Browse files
authored
[RISCV] Restructure CC_RISCV_FastCC to reduce code duplication. NFC (#107671)
Move GPR handling closer to the end so we can share it with the indirect handling for vectors. Use a single block for storing any type to the stack.
1 parent 8c05515 commit 859b785

File tree

1 file changed

+20
-44
lines changed

1 file changed

+20
-44
lines changed

llvm/lib/Target/RISCV/RISCVCallingConv.cpp

Lines changed: 20 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -521,13 +521,6 @@ bool llvm::CC_RISCV_FastCC(unsigned ValNo, MVT ValVT, MVT LocVT,
521521
const RISCVTargetLowering &TLI = *Subtarget.getTargetLowering();
522522
RISCVABI::ABI ABI = Subtarget.getTargetABI();
523523

524-
if (LocVT == MVT::i32 || LocVT == MVT::i64) {
525-
if (MCRegister Reg = State.AllocateReg(getFastCCArgGPRs(ABI))) {
526-
State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
527-
return false;
528-
}
529-
}
530-
531524
if ((LocVT == MVT::f16 && Subtarget.hasStdExtZfhmin()) ||
532525
(LocVT == MVT::bf16 && Subtarget.hasStdExtZfbfmin())) {
533526
static const MCPhysReg FPR16List[] = {
@@ -565,14 +558,16 @@ bool llvm::CC_RISCV_FastCC(unsigned ValNo, MVT ValVT, MVT LocVT,
565558
}
566559
}
567560

561+
MVT XLenVT = Subtarget.getXLenVT();
562+
568563
// Check if there is an available GPR before hitting the stack.
569564
if ((LocVT == MVT::f16 && Subtarget.hasStdExtZhinxmin()) ||
570565
(LocVT == MVT::f32 && Subtarget.hasStdExtZfinx()) ||
571566
(LocVT == MVT::f64 && Subtarget.is64Bit() &&
572567
Subtarget.hasStdExtZdinx())) {
573568
if (MCRegister Reg = State.AllocateReg(getFastCCArgGPRs(ABI))) {
574569
if (LocVT.getSizeInBits() != Subtarget.getXLen()) {
575-
LocVT = Subtarget.getXLenVT();
570+
LocVT = XLenVT;
576571
State.addLoc(
577572
CCValAssign::getCustomReg(ValNo, ValVT, Reg, LocVT, LocInfo));
578573
return false;
@@ -582,58 +577,39 @@ bool llvm::CC_RISCV_FastCC(unsigned ValNo, MVT ValVT, MVT LocVT,
582577
}
583578
}
584579

585-
if (LocVT == MVT::f16 || LocVT == MVT::bf16) {
586-
int64_t Offset2 = State.AllocateStack(2, Align(2));
587-
State.addLoc(CCValAssign::getMem(ValNo, ValVT, Offset2, LocVT, LocInfo));
588-
return false;
589-
}
590-
591-
if (LocVT == MVT::i32 || LocVT == MVT::f32) {
592-
int64_t Offset4 = State.AllocateStack(4, Align(4));
593-
State.addLoc(CCValAssign::getMem(ValNo, ValVT, Offset4, LocVT, LocInfo));
594-
return false;
595-
}
596-
597-
if (LocVT == MVT::i64 || LocVT == MVT::f64) {
598-
int64_t Offset5 = State.AllocateStack(8, Align(8));
599-
State.addLoc(CCValAssign::getMem(ValNo, ValVT, Offset5, LocVT, LocInfo));
600-
return false;
601-
}
580+
ArrayRef<MCPhysReg> ArgGPRs = getFastCCArgGPRs(ABI);
602581

603582
if (LocVT.isVector()) {
604583
if (MCRegister Reg = allocateRVVReg(ValVT, ValNo, State, TLI)) {
605584
// Fixed-length vectors are located in the corresponding scalable-vector
606585
// container types.
607-
if (ValVT.isFixedLengthVector())
586+
if (LocVT.isFixedLengthVector())
608587
LocVT = TLI.getContainerForFixedLengthVector(LocVT);
609588
State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
610589
return false;
611590
}
612591

613-
// Try and pass the address via a "fast" GPR.
614-
if (MCRegister GPRReg = State.AllocateReg(getFastCCArgGPRs(ABI))) {
592+
// Pass scalable vectors indirectly. Pass fixed vectors indirectly if we
593+
// have a free GPR.
594+
if (LocVT.isScalableVector() ||
595+
State.getFirstUnallocated(ArgGPRs) != ArgGPRs.size()) {
615596
LocInfo = CCValAssign::Indirect;
616-
LocVT = Subtarget.getXLenVT();
617-
State.addLoc(CCValAssign::getReg(ValNo, ValVT, GPRReg, LocVT, LocInfo));
618-
return false;
597+
LocVT = XLenVT;
619598
}
599+
}
620600

621-
// Pass scalable vectors indirectly by storing the pointer on the stack.
622-
if (ValVT.isScalableVector()) {
623-
LocInfo = CCValAssign::Indirect;
624-
LocVT = Subtarget.getXLenVT();
625-
unsigned XLen = Subtarget.getXLen();
626-
int64_t StackOffset = State.AllocateStack(XLen / 8, Align(XLen / 8));
627-
State.addLoc(
628-
CCValAssign::getMem(ValNo, ValVT, StackOffset, LocVT, LocInfo));
601+
if (LocVT == XLenVT) {
602+
if (MCRegister Reg = State.AllocateReg(getFastCCArgGPRs(ABI))) {
603+
State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
629604
return false;
630605
}
606+
}
631607

632-
// Pass fixed-length vectors on the stack.
633-
auto StackAlign = MaybeAlign(ValVT.getScalarSizeInBits() / 8).valueOrOne();
634-
int64_t StackOffset = State.AllocateStack(ValVT.getStoreSize(), StackAlign);
635-
State.addLoc(
636-
CCValAssign::getMem(ValNo, ValVT, StackOffset, LocVT, LocInfo));
608+
if (LocVT == XLenVT || LocVT == MVT::f16 || LocVT == MVT::bf16 ||
609+
LocVT == MVT::f32 || LocVT == MVT::f64 || LocVT.isFixedLengthVector()) {
610+
Align StackAlign = MaybeAlign(ValVT.getScalarSizeInBits() / 8).valueOrOne();
611+
int64_t Offset = State.AllocateStack(LocVT.getStoreSize(), StackAlign);
612+
State.addLoc(CCValAssign::getMem(ValNo, ValVT, Offset, LocVT, LocInfo));
637613
return false;
638614
}
639615

0 commit comments

Comments
 (0)