@@ -5805,7 +5805,7 @@ SDValue AArch64TargetLowering::LowerFormalArguments(
     assert(!Res && "Call operand has unhandled type");
     (void)Res;
   }
-  SmallVector<SDValue, 16> ArgValues;
+
   unsigned ExtraArgLocs = 0;
   for (unsigned i = 0, e = Ins.size(); i != e; ++i) {
     CCValAssign &VA = ArgLocs[i - ExtraArgLocs];
@@ -6157,17 +6157,10 @@ void AArch64TargetLowering::saveVarArgRegisters(CCState &CCInfo,
 /// appropriate copies out of appropriate physical registers.
 SDValue AArch64TargetLowering::LowerCallResult(
     SDValue Chain, SDValue InFlag, CallingConv::ID CallConv, bool isVarArg,
-    const SmallVectorImpl<ISD::InputArg> &Ins, const SDLoc &DL,
+    const SmallVectorImpl<CCValAssign> &RVLocs, const SDLoc &DL,
     SelectionDAG &DAG, SmallVectorImpl<SDValue> &InVals, bool isThisReturn,
     SDValue ThisVal) const {
-  CCAssignFn *RetCC = CCAssignFnForReturn(CallConv);
-  // Assign locations to each value returned by this call.
-  SmallVector<CCValAssign, 16> RVLocs;
   DenseMap<unsigned, SDValue> CopiedRegs;
-  CCState CCInfo(CallConv, isVarArg, DAG.getMachineFunction(), RVLocs,
-                 *DAG.getContext());
-  CCInfo.AnalyzeCallResult(Ins, RetCC);
-
   // Copy all of the result registers out of their specified physreg.
   for (unsigned i = 0; i != RVLocs.size(); ++i) {
     CCValAssign VA = RVLocs[i];
@@ -6508,17 +6501,39 @@ AArch64TargetLowering::LowerCall(CallLoweringInfo &CLI,
     GuardWithBTI = FuncInfo->branchTargetEnforcement();
   }
 
+  // Analyze operands of the call, assigning locations to each operand.
+  SmallVector<CCValAssign, 16> ArgLocs;
+  CCState CCInfo(CallConv, IsVarArg, MF, ArgLocs, *DAG.getContext());
+
+  if (IsVarArg) {
+    unsigned NumArgs = Outs.size();
+
+    for (unsigned i = 0; i != NumArgs; ++i) {
+      if (!Outs[i].IsFixed && Outs[i].VT.isScalableVector())
+        report_fatal_error("Passing SVE types to variadic functions is "
+                           "currently not supported");
+    }
+  }
+
+  analyzeCallOperands(*this, Subtarget, CLI, CCInfo);
+
+  CCAssignFn *RetCC = CCAssignFnForReturn(CallConv);
+  // Assign locations to each value returned by this call.
+  SmallVector<CCValAssign, 16> RVLocs;
+  CCState RetCCInfo(CallConv, IsVarArg, DAG.getMachineFunction(), RVLocs,
+                    *DAG.getContext());
+  RetCCInfo.AnalyzeCallResult(Ins, RetCC);
+
   // Check callee args/returns for SVE registers and set calling convention
   // accordingly.
   if (CallConv == CallingConv::C || CallConv == CallingConv::Fast) {
-    bool CalleeOutSVE = any_of(Outs, [](ISD::OutputArg &Out){
-      return Out.VT.isScalableVector();
-    });
-    bool CalleeInSVE = any_of(Ins, [](ISD::InputArg &In){
-      return In.VT.isScalableVector();
-    });
-
-    if (CalleeInSVE || CalleeOutSVE)
+    auto HasSVERegLoc = [](CCValAssign &Loc) {
+      if (!Loc.isRegLoc())
+        return false;
+      return AArch64::ZPRRegClass.contains(Loc.getLocReg()) ||
+             AArch64::PPRRegClass.contains(Loc.getLocReg());
+    };
+    if (any_of(RVLocs, HasSVERegLoc) || any_of(ArgLocs, HasSVERegLoc))
       CallConv = CallingConv::AArch64_SVE_VectorCall;
   }
 
@@ -6540,22 +6555,6 @@ AArch64TargetLowering::LowerCall(CallLoweringInfo &CLI,
     report_fatal_error("failed to perform tail call elimination on a call "
                        "site marked musttail");
 
-  // Analyze operands of the call, assigning locations to each operand.
-  SmallVector<CCValAssign, 16> ArgLocs;
-  CCState CCInfo(CallConv, IsVarArg, MF, ArgLocs, *DAG.getContext());
-
-  if (IsVarArg) {
-    unsigned NumArgs = Outs.size();
-
-    for (unsigned i = 0; i != NumArgs; ++i) {
-      if (!Outs[i].IsFixed && Outs[i].VT.isScalableVector())
-        report_fatal_error("Passing SVE types to variadic functions is "
-                           "currently not supported");
-    }
-  }
-
-  analyzeCallOperands(*this, Subtarget, CLI, CCInfo);
-
   // Get a count of how many bytes are to be pushed on the stack.
   unsigned NumBytes = CCInfo.getNextStackOffset();
 
@@ -6961,7 +6960,7 @@ AArch64TargetLowering::LowerCall(CallLoweringInfo &CLI,
 
   // Handle result values, copying them out of physregs into vregs that we
   // return.
-  return LowerCallResult(Chain, InFlag, CallConv, IsVarArg, Ins, DL, DAG,
+  return LowerCallResult(Chain, InFlag, CallConv, IsVarArg, RVLocs, DL, DAG,
                          InVals, IsThisReturn,
                          IsThisReturn ? OutVals[0] : SDValue());
 }