@@ -992,6 +992,16 @@ void AArch64FrameLowering::emitZeroCallUsedRegs(BitVector RegsToZero,
   }
 }
 
+static void getLiveRegsForEntryMBB(LivePhysRegs &LiveRegs,
+                                   const MachineBasicBlock &MBB) {
+  const MachineFunction *MF = MBB.getParent();
+  LiveRegs.addLiveIns(MBB);
+  // Mark callee saved registers as used so we will not choose them.
+  const MCPhysReg *CSRegs = MF->getRegInfo().getCalleeSavedRegs();
+  for (unsigned i = 0; CSRegs[i]; ++i)
+    LiveRegs.addReg(CSRegs[i]);
+}
+
 // Find a scratch register that we can use at the start of the prologue to
 // re-align the stack pointer. We avoid using callee-save registers since they
 // may appear to be free when this is called from canUseAsPrologue (during
@@ -1013,12 +1023,7 @@ static unsigned findScratchNonCalleeSaveRegister(MachineBasicBlock *MBB) {
   const AArch64Subtarget &Subtarget = MF->getSubtarget<AArch64Subtarget>();
   const AArch64RegisterInfo &TRI = *Subtarget.getRegisterInfo();
   LivePhysRegs LiveRegs(TRI);
-  LiveRegs.addLiveIns(*MBB);
-
-  // Mark callee saved registers as used so we will not choose them.
-  const MCPhysReg *CSRegs = MF->getRegInfo().getCalleeSavedRegs();
-  for (unsigned i = 0; CSRegs[i]; ++i)
-    LiveRegs.addReg(CSRegs[i]);
+  getLiveRegsForEntryMBB(LiveRegs, *MBB);
 
   // Prefer X9 since it was historically used for the prologue scratch reg.
   const MachineRegisterInfo &MRI = MF->getRegInfo();
@@ -1039,6 +1044,19 @@ bool AArch64FrameLowering::canUseAsPrologue(
   const AArch64Subtarget &Subtarget = MF->getSubtarget<AArch64Subtarget>();
   const AArch64RegisterInfo *RegInfo = Subtarget.getRegisterInfo();
   const AArch64TargetLowering *TLI = Subtarget.getTargetLowering();
+  const AArch64FunctionInfo *AFI = MF->getInfo<AArch64FunctionInfo>();
+
+  if (AFI->hasSwiftAsyncContext()) {
+    const AArch64RegisterInfo &TRI = *Subtarget.getRegisterInfo();
+    const MachineRegisterInfo &MRI = MF->getRegInfo();
+    LivePhysRegs LiveRegs(TRI);
+    getLiveRegsForEntryMBB(LiveRegs, MBB);
+    // The StoreSwiftAsyncContext clobbers X16 and X17. Make sure they are
+    // available.
+    if (!LiveRegs.available(MRI, AArch64::X16) ||
+        !LiveRegs.available(MRI, AArch64::X17))
+      return false;
+  }
 
   // Don't need a scratch register if we're not going to re-align the stack or
   // emit stack probes.
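
For readers skimming the hunks above, the new guard in canUseAsPrologue boils down to the pattern sketched below. This is a condensed illustration, not code from the commit: the helper name hasFreeSwiftAsyncScratchRegs is hypothetical, and the sketch only reuses the LLVM APIs that already appear in the diff (LivePhysRegs, getLiveRegsForEntryMBB, MachineRegisterInfo).

// Hypothetical condensed form of the check added to canUseAsPrologue.
// Assumes the same LLVM APIs shown in the diff above; not a standalone program.
static bool hasFreeSwiftAsyncScratchRegs(const MachineBasicBlock &MBB,
                                         const AArch64RegisterInfo &TRI) {
  const MachineFunction *MF = MBB.getParent();
  const MachineRegisterInfo &MRI = MF->getRegInfo();
  LivePhysRegs LiveRegs(TRI);
  // Entry live-ins plus all callee-saved registers, as in the new helper.
  getLiveRegsForEntryMBB(LiveRegs, MBB);
  // The StoreSwiftAsyncContext expansion clobbers X16 and X17, so the block
  // can only serve as a prologue if both are free at its entry.
  return LiveRegs.available(MRI, AArch64::X16) &&
         LiveRegs.available(MRI, AArch64::X17);
}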