//===--- AArch64CallLowering.cpp - Call lowering --------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements the lowering of LLVM calls to machine code calls for
/// GlobalISel.
///
//===----------------------------------------------------------------------===//

#include "AArch64CallLowering.h"
#include "AArch64ISelLowering.h"
#include "AArch64MachineFunctionInfo.h"
#include "AArch64Subtarget.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/Analysis.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/GlobalISel/Utils.h"
#include "llvm/CodeGen/LowLevelType.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/CodeGen/ValueTypes.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/MachineValueType.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>

#define DEBUG_TYPE "aarch64-call-lowering"

using namespace llvm;

AArch64CallLowering::AArch64CallLowering(const AArch64TargetLowering &TLI)
    : CallLowering(&TLI) {}

namespace {
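/// Base handler for values flowing into the current function: formal
/// arguments, or the results of a call we just emitted. Subclasses differ
/// only in how the physical register is marked as used (see markPhysRegUsed).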
struct IncomingArgHandler : public CallLowering::ValueHandler {
  IncomingArgHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                     CCAssignFn *AssignFn)
      : ValueHandler(MIRBuilder, MRI, AssignFn), StackUsed(0) {}

  Register getStackAddress(uint64_t Size, int64_t Offset,
                           MachinePointerInfo &MPO) override {
    auto &MFI = MIRBuilder.getMF().getFrameInfo();
    int FI = MFI.CreateFixedObject(Size, Offset, true);
    MPO = MachinePointerInfo::getFixedStack(MIRBuilder.getMF(), FI);
    Register AddrReg = MRI.createGenericVirtualRegister(LLT::pointer(0, 64));
    MIRBuilder.buildFrameIndex(AddrReg, FI);
    StackUsed = std::max(StackUsed, Size + Offset);
    return AddrReg;
  }

  void assignValueToReg(Register ValVReg, Register PhysReg,
                        CCValAssign &VA) override {
    markPhysRegUsed(PhysReg);
    switch (VA.getLocInfo()) {
    default:
      MIRBuilder.buildCopy(ValVReg, PhysReg);
      break;
    case CCValAssign::LocInfo::SExt:
    case CCValAssign::LocInfo::ZExt:
    case CCValAssign::LocInfo::AExt: {
      auto Copy = MIRBuilder.buildCopy(LLT{VA.getLocVT()}, PhysReg);
      MIRBuilder.buildTrunc(ValVReg, Copy);
      break;
    }
    }
  }

  void assignValueToAddress(Register ValVReg, Register Addr, uint64_t Size,
                            MachinePointerInfo &MPO, CCValAssign &VA) override {
    // FIXME: Get alignment
    auto MMO = MIRBuilder.getMF().getMachineMemOperand(
        MPO, MachineMemOperand::MOLoad | MachineMemOperand::MOInvariant, Size,
        1);
    MIRBuilder.buildLoad(ValVReg, Addr, *MMO);
  }

  /// How the physical register gets marked varies between formal
  /// parameters (it's a basic-block live-in), and a call instruction
  /// (it's an implicit-def of the BL).
  virtual void markPhysRegUsed(unsigned PhysReg) = 0;

  bool isIncomingArgumentHandler() const override { return true; }

  uint64_t StackUsed;
};

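/// Handler for a function's own formal arguments: each incoming physical
/// register is recorded as a live-in of both the function and the current
/// basic block (the entry block when lowering formal arguments).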
struct FormalArgHandler : public IncomingArgHandler {
  FormalArgHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                   CCAssignFn *AssignFn)
      : IncomingArgHandler(MIRBuilder, MRI, AssignFn) {}

  void markPhysRegUsed(unsigned PhysReg) override {
    MIRBuilder.getMRI()->addLiveIn(PhysReg);
    MIRBuilder.getMBB().addLiveIn(PhysReg);
  }
};

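/// Handler for the values coming back from a call: each physical return
/// register becomes an implicit def of the call instruction being built.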
struct CallReturnHandler : public IncomingArgHandler {
  CallReturnHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                    MachineInstrBuilder MIB, CCAssignFn *AssignFn)
      : IncomingArgHandler(MIRBuilder, MRI, AssignFn), MIB(MIB) {}

  void markPhysRegUsed(unsigned PhysReg) override {
    MIB.addDef(PhysReg, RegState::Implicit);
  }

  MachineInstrBuilder MIB;
};

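/// Handler for values flowing out of the current function: outgoing call
/// arguments and return values. Register values become implicit uses of the
/// call/return instruction; stack values are stored relative to SP, or into
/// fixed frame objects when lowering a tail call.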
struct OutgoingArgHandler : public CallLowering::ValueHandler {
  OutgoingArgHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                     MachineInstrBuilder MIB, CCAssignFn *AssignFn,
                     CCAssignFn *AssignFnVarArg, bool IsTailCall = false)
      : ValueHandler(MIRBuilder, MRI, AssignFn), MIB(MIB),
        AssignFnVarArg(AssignFnVarArg), IsTailCall(IsTailCall), StackSize(0) {}

  Register getStackAddress(uint64_t Size, int64_t Offset,
                           MachinePointerInfo &MPO) override {
    MachineFunction &MF = MIRBuilder.getMF();
    LLT p0 = LLT::pointer(0, 64);
    LLT s64 = LLT::scalar(64);

    if (IsTailCall) {
      // TODO: For -tailcallopt tail calls, Offset will need FPDiff like in
      // ISelLowering.
      int FI = MF.getFrameInfo().CreateFixedObject(Size, Offset, true);
      Register FIReg = MRI.createGenericVirtualRegister(p0);
      MIRBuilder.buildFrameIndex(FIReg, FI);
      MPO = MachinePointerInfo::getFixedStack(MF, FI);
      return FIReg;
    }

    Register SPReg = MRI.createGenericVirtualRegister(p0);
    MIRBuilder.buildCopy(SPReg, Register(AArch64::SP));

    Register OffsetReg = MRI.createGenericVirtualRegister(s64);
    MIRBuilder.buildConstant(OffsetReg, Offset);

    Register AddrReg = MRI.createGenericVirtualRegister(p0);
    MIRBuilder.buildGEP(AddrReg, SPReg, OffsetReg);

    MPO = MachinePointerInfo::getStack(MF, Offset);
    return AddrReg;
  }

  void assignValueToReg(Register ValVReg, Register PhysReg,
                        CCValAssign &VA) override {
    MIB.addUse(PhysReg, RegState::Implicit);
    Register ExtReg = extendRegister(ValVReg, VA);
    MIRBuilder.buildCopy(PhysReg, ExtReg);
  }

  void assignValueToAddress(Register ValVReg, Register Addr, uint64_t Size,
                            MachinePointerInfo &MPO, CCValAssign &VA) override {
    if (VA.getLocInfo() == CCValAssign::LocInfo::AExt) {
      Size = VA.getLocVT().getSizeInBits() / 8;
      ValVReg = MIRBuilder.buildAnyExt(LLT::scalar(Size * 8), ValVReg)
                    ->getOperand(0)
                    .getReg();
    }
    auto MMO = MIRBuilder.getMF().getMachineMemOperand(
        MPO, MachineMemOperand::MOStore, Size, 1);
    MIRBuilder.buildStore(ValVReg, Addr, *MMO);
  }

  bool assignArg(unsigned ValNo, MVT ValVT, MVT LocVT,
                 CCValAssign::LocInfo LocInfo,
                 const CallLowering::ArgInfo &Info,
                 ISD::ArgFlagsTy Flags,
                 CCState &State) override {
    bool Res;
    if (Info.IsFixed)
      Res = AssignFn(ValNo, ValVT, LocVT, LocInfo, Flags, State);
    else
      Res = AssignFnVarArg(ValNo, ValVT, LocVT, LocInfo, Flags, State);

    StackSize = State.getNextStackOffset();
    return Res;
  }

  MachineInstrBuilder MIB;
  CCAssignFn *AssignFnVarArg;
  bool IsTailCall;
  uint64_t StackSize;
};
} // namespace

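/// Break OrigArg into one ArgInfo per legal value type; for example, a
/// {double, double} struct occupying two virtual registers becomes two double
/// ArgInfos. Single-element aggregates are simply unwrapped
/// ([1 x double] -> double), and arguments that must live in consecutive
/// registers get their flags updated accordingly.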
void AArch64CallLowering::splitToValueTypes(
    const ArgInfo &OrigArg, SmallVectorImpl<ArgInfo> &SplitArgs,
    const DataLayout &DL, MachineRegisterInfo &MRI,
    CallingConv::ID CallConv) const {
  const AArch64TargetLowering &TLI = *getTLI<AArch64TargetLowering>();
  LLVMContext &Ctx = OrigArg.Ty->getContext();

  if (OrigArg.Ty->isVoidTy())
    return;

  SmallVector<EVT, 4> SplitVTs;
  SmallVector<uint64_t, 4> Offsets;
  ComputeValueVTs(TLI, DL, OrigArg.Ty, SplitVTs, &Offsets, 0);

  if (SplitVTs.size() == 1) {
    // No splitting to do, but we want to replace the original type (e.g. [1 x
    // double] -> double).
    SplitArgs.emplace_back(OrigArg.Regs[0], SplitVTs[0].getTypeForEVT(Ctx),
                           OrigArg.Flags[0], OrigArg.IsFixed);
    return;
  }

  // Create one ArgInfo for each virtual register in the original ArgInfo.
  assert(OrigArg.Regs.size() == SplitVTs.size() && "Regs / types mismatch");

  bool NeedsRegBlock = TLI.functionArgumentNeedsConsecutiveRegisters(
      OrigArg.Ty, CallConv, false);
  for (unsigned i = 0, e = SplitVTs.size(); i < e; ++i) {
    Type *SplitTy = SplitVTs[i].getTypeForEVT(Ctx);
    SplitArgs.emplace_back(OrigArg.Regs[i], SplitTy, OrigArg.Flags[0],
                           OrigArg.IsFixed);
    if (NeedsRegBlock)
      SplitArgs.back().Flags[0].setInConsecutiveRegs();
  }

  SplitArgs.back().Flags[0].setInConsecutiveRegsLast();
}

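/// Lower a return to a RET_ReallyLR, copying each piece of the return value
/// into the physical registers chosen by the return calling convention, and
/// extending or padding values first where the CC requires it.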
bool AArch64CallLowering::lowerReturn(MachineIRBuilder &MIRBuilder,
                                      const Value *Val,
                                      ArrayRef<Register> VRegs,
                                      Register SwiftErrorVReg) const {
  auto MIB = MIRBuilder.buildInstrNoInsert(AArch64::RET_ReallyLR);
  assert(((Val && !VRegs.empty()) || (!Val && VRegs.empty())) &&
         "Return value without a vreg");

  bool Success = true;
  if (!VRegs.empty()) {
    MachineFunction &MF = MIRBuilder.getMF();
    const Function &F = MF.getFunction();

    MachineRegisterInfo &MRI = MF.getRegInfo();
    const AArch64TargetLowering &TLI = *getTLI<AArch64TargetLowering>();
    CCAssignFn *AssignFn = TLI.CCAssignFnForReturn(F.getCallingConv());
    auto &DL = F.getParent()->getDataLayout();
    LLVMContext &Ctx = Val->getType()->getContext();

    SmallVector<EVT, 4> SplitEVTs;
    ComputeValueVTs(TLI, DL, Val->getType(), SplitEVTs);
    assert(VRegs.size() == SplitEVTs.size() &&
           "For each split Type there should be exactly one VReg.");

    SmallVector<ArgInfo, 8> SplitArgs;
    CallingConv::ID CC = F.getCallingConv();

    for (unsigned i = 0; i < SplitEVTs.size(); ++i) {
      if (TLI.getNumRegistersForCallingConv(Ctx, CC, SplitEVTs[i]) > 1) {
        LLVM_DEBUG(dbgs() << "Can't handle extended arg types which need split");
        return false;
      }

      Register CurVReg = VRegs[i];
      ArgInfo CurArgInfo = ArgInfo{CurVReg, SplitEVTs[i].getTypeForEVT(Ctx)};
      setArgFlags(CurArgInfo, AttributeList::ReturnIndex, DL, F);

      // i1 is a special case because SDAG i1 true is naturally zero extended
      // when widened using ANYEXT. We need to do it explicitly here.
      if (MRI.getType(CurVReg).getSizeInBits() == 1) {
        CurVReg = MIRBuilder.buildZExt(LLT::scalar(8), CurVReg).getReg(0);
      } else {
        // Some types will need extending as specified by the CC.
        MVT NewVT = TLI.getRegisterTypeForCallingConv(Ctx, CC, SplitEVTs[i]);
        if (EVT(NewVT) != SplitEVTs[i]) {
          unsigned ExtendOp = TargetOpcode::G_ANYEXT;
          if (F.getAttributes().hasAttribute(AttributeList::ReturnIndex,
                                             Attribute::SExt))
            ExtendOp = TargetOpcode::G_SEXT;
          else if (F.getAttributes().hasAttribute(AttributeList::ReturnIndex,
                                                  Attribute::ZExt))
            ExtendOp = TargetOpcode::G_ZEXT;

          LLT NewLLT(NewVT);
          LLT OldLLT(MVT::getVT(CurArgInfo.Ty));
          CurArgInfo.Ty = EVT(NewVT).getTypeForEVT(Ctx);
          // Instead of an extend, we might have a vector type which needs
          // padding with more elements, e.g. <2 x half> -> <4 x half>.
          if (NewVT.isVector()) {
            if (OldLLT.isVector()) {
              if (NewLLT.getNumElements() > OldLLT.getNumElements()) {
                // We don't handle VA types which are not exactly twice the
                // size, but this can easily be done in the future.
                if (NewLLT.getNumElements() != OldLLT.getNumElements() * 2) {
                  LLVM_DEBUG(dbgs() << "Outgoing vector ret has too many elts");
                  return false;
                }
                auto Undef = MIRBuilder.buildUndef({OldLLT});
                CurVReg =
                    MIRBuilder.buildMerge({NewLLT}, {CurVReg, Undef.getReg(0)})
                        .getReg(0);
              } else {
                // Just do a vector extend.
                CurVReg = MIRBuilder.buildInstr(ExtendOp, {NewLLT}, {CurVReg})
                              .getReg(0);
              }
            } else if (NewLLT.getNumElements() == 2) {
              // We need to pad a <1 x S> type to <2 x S>. Since we don't have
              // <1 x S> vector types in GISel we use a build_vector instead
              // of a vector merge/concat.
              auto Undef = MIRBuilder.buildUndef({OldLLT});
              CurVReg =
                  MIRBuilder
                      .buildBuildVector({NewLLT}, {CurVReg, Undef.getReg(0)})
                      .getReg(0);
            } else {
              LLVM_DEBUG(dbgs() << "Could not handle ret ty");
              return false;
            }
          } else {
            // A scalar extend.
            CurVReg =
                MIRBuilder.buildInstr(ExtendOp, {NewLLT}, {CurVReg}).getReg(0);
          }
        }
      }
      if (CurVReg != CurArgInfo.Regs[0]) {
        CurArgInfo.Regs[0] = CurVReg;
        // Reset the arg flags after modifying CurVReg.
        setArgFlags(CurArgInfo, AttributeList::ReturnIndex, DL, F);
      }
      splitToValueTypes(CurArgInfo, SplitArgs, DL, MRI, CC);
    }

    OutgoingArgHandler Handler(MIRBuilder, MRI, MIB, AssignFn, AssignFn);
    Success = handleAssignments(MIRBuilder, SplitArgs, Handler);
  }

  if (SwiftErrorVReg) {
    MIB.addUse(AArch64::X21, RegState::Implicit);
    MIRBuilder.buildCopy(AArch64::X21, SwiftErrorVReg);
  }

  MIRBuilder.insertInstr(MIB);
  return Success;
}

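/// Lower the incoming (formal) arguments: split each IR argument into legal
/// pieces, assign each piece to a register or stack slot, and record the
/// stack space used so later tail-call lowering can check whether a callee's
/// arguments would fit.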
bool AArch64CallLowering::lowerFormalArguments(
    MachineIRBuilder &MIRBuilder, const Function &F,
    ArrayRef<ArrayRef<Register>> VRegs) const {
  MachineFunction &MF = MIRBuilder.getMF();
  MachineBasicBlock &MBB = MIRBuilder.getMBB();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  auto &DL = F.getParent()->getDataLayout();

  SmallVector<ArgInfo, 8> SplitArgs;
  unsigned i = 0;
  for (auto &Arg : F.args()) {
    if (DL.getTypeStoreSize(Arg.getType()) == 0)
      continue;

    ArgInfo OrigArg{VRegs[i], Arg.getType()};
    setArgFlags(OrigArg, i + AttributeList::FirstArgIndex, DL, F);

    splitToValueTypes(OrigArg, SplitArgs, DL, MRI, F.getCallingConv());
    ++i;
  }

  if (!MBB.empty())
    MIRBuilder.setInstr(*MBB.begin());

  const AArch64TargetLowering &TLI = *getTLI<AArch64TargetLowering>();
  CCAssignFn *AssignFn =
      TLI.CCAssignFnForCall(F.getCallingConv(), /*IsVarArg=*/false);

  FormalArgHandler Handler(MIRBuilder, MRI, AssignFn);
  if (!handleAssignments(MIRBuilder, SplitArgs, Handler))
    return false;

  AArch64FunctionInfo *FuncInfo = MF.getInfo<AArch64FunctionInfo>();
  uint64_t StackOffset = Handler.StackUsed;
  if (F.isVarArg()) {
    auto &Subtarget = MF.getSubtarget<AArch64Subtarget>();
    if (!Subtarget.isTargetDarwin()) {
      // FIXME: we need to reimplement saveVarArgsRegisters from
      // AArch64ISelLowering.
      return false;
    }

    // We currently pass all varargs at 8-byte alignment, or 4 in ILP32.
    StackOffset =
        alignTo(Handler.StackUsed, Subtarget.isTargetILP32() ? 4 : 8);

    auto &MFI = MIRBuilder.getMF().getFrameInfo();
    FuncInfo->setVarArgsStackIndex(MFI.CreateFixedObject(4, StackOffset, true));
  }

  // TODO: Port checks for stack to restore for -tailcallopt from ISelLowering.
  // We need to keep track of the size of function stacks for tail call
  // optimization. When we tail call, we need to check if the callee's
  // arguments will fit on the caller's stack. So, whenever we lower formal
  // arguments, we should keep track of this information, since we might lower
  // a tail call in this function later.
  FuncInfo->setBytesInStackArgArea(StackOffset);

  auto &Subtarget = MF.getSubtarget<AArch64Subtarget>();
  if (Subtarget.hasCustomCallingConv())
    Subtarget.getRegisterInfo()->UpdateCustomCalleeSavedRegs(MF);

  // Move back to the end of the basic block.
  MIRBuilder.setMBB(MBB);

  return true;
}

/// Return true if the calling convention is one that we can guarantee TCO for.
static bool canGuaranteeTCO(CallingConv::ID CC) {
  return CC == CallingConv::Fast;
}

/// Return true if we might ever do TCO for calls with this calling convention.
static bool mayTailCallThisCC(CallingConv::ID CC) {
  switch (CC) {
  case CallingConv::C:
  case CallingConv::PreserveMost:
  case CallingConv::Swift:
    return true;
  default:
    return canGuaranteeTCO(CC);
  }
}

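/// Return true if the caller and callee agree on how a call's values are
/// passed: either the calling conventions match exactly, or both conventions
/// assign the call's results compatibly and preserve the same set of
/// registers.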
bool AArch64CallLowering::doCallerAndCalleePassArgsTheSameWay(
    CallLoweringInfo &Info, MachineFunction &MF,
    SmallVectorImpl<ArgInfo> &InArgs) const {
  const Function &CallerF = MF.getFunction();
  CallingConv::ID CalleeCC = Info.CallConv;
  CallingConv::ID CallerCC = CallerF.getCallingConv();

  // If the calling conventions match, then everything must be the same.
  if (CalleeCC == CallerCC)
    return true;

  // Check if the caller and callee will handle arguments in the same way.
  const AArch64TargetLowering &TLI = *getTLI<AArch64TargetLowering>();
  CCAssignFn *CalleeAssignFn = TLI.CCAssignFnForCall(CalleeCC, Info.IsVarArg);
  CCAssignFn *CallerAssignFn =
      TLI.CCAssignFnForCall(CallerCC, CallerF.isVarArg());

  if (!resultsCompatible(Info, MF, InArgs, *CalleeAssignFn, *CallerAssignFn))
    return false;

  // Make sure that the caller and callee preserve all of the same registers.
  auto TRI = MF.getSubtarget<AArch64Subtarget>().getRegisterInfo();
  const uint32_t *CallerPreserved = TRI->getCallPreservedMask(MF, CallerCC);
  const uint32_t *CalleePreserved = TRI->getCallPreservedMask(MF, CalleeCC);
  if (MF.getSubtarget<AArch64Subtarget>().hasCustomCallingConv()) {
    TRI->UpdateCustomCallPreservedMask(MF, &CallerPreserved);
    TRI->UpdateCustomCallPreservedMask(MF, &CalleePreserved);
  }

  return TRI->regmaskSubsetEqual(CallerPreserved, CalleePreserved);
}

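/// Check whether the outgoing arguments of a prospective tail call are safe
/// to lower: they must fit within the stack argument area the caller already
/// owns, and none of them may be passed in a callee-saved register.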
bool AArch64CallLowering::areCalleeOutgoingArgsTailCallable(
    CallLoweringInfo &Info, MachineFunction &MF,
    SmallVectorImpl<ArgInfo> &OutArgs) const {
  // If there are no outgoing arguments, then we are done.
  if (OutArgs.empty())
    return true;

  const Function &CallerF = MF.getFunction();
  CallingConv::ID CalleeCC = Info.CallConv;
  CallingConv::ID CallerCC = CallerF.getCallingConv();
  const AArch64TargetLowering &TLI = *getTLI<AArch64TargetLowering>();

  // We have outgoing arguments. Make sure that we can tail call with them.
  SmallVector<CCValAssign, 16> OutLocs;
  CCState OutInfo(CalleeCC, false, MF, OutLocs, CallerF.getContext());

  if (!analyzeArgInfo(OutInfo, OutArgs,
                      *TLI.CCAssignFnForCall(CalleeCC, Info.IsVarArg))) {
    LLVM_DEBUG(dbgs() << "... Could not analyze call operands.\n");
    return false;
  }

  // Make sure that they can fit on the caller's stack.
  const AArch64FunctionInfo *FuncInfo = MF.getInfo<AArch64FunctionInfo>();
  if (OutInfo.getNextStackOffset() > FuncInfo->getBytesInStackArgArea()) {
    LLVM_DEBUG(dbgs() << "... Cannot fit call operands on caller's stack.\n");
    return false;
  }

  // Verify that the parameters in callee-saved registers match.
  // TODO: Port this over to CallLowering as general code once swiftself is
  // supported.
  auto TRI = MF.getSubtarget<AArch64Subtarget>().getRegisterInfo();
  const uint32_t *CallerPreservedMask = TRI->getCallPreservedMask(MF, CallerCC);

  for (auto &ArgLoc : OutLocs) {
    // If it's not a register, it's fine.
    if (!ArgLoc.isRegLoc())
      continue;

    Register Reg = ArgLoc.getLocReg();

    // Only look at callee-saved registers.
    if (MachineOperand::clobbersPhysReg(CallerPreservedMask, Reg))
      continue;

    // TODO: Port the remainder of this check from TargetLowering to support
    // tail calling swiftself.
    LLVM_DEBUG(
        dbgs()
        << "... Cannot handle callee-saved registers in outgoing args yet.\n");
    return false;
  }

  return true;
}

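/// Conservatively decide whether this call may be lowered as a sibling call
/// (a tail call requiring no stack adjustment). Returning false is always
/// safe; the call is then lowered as a normal call.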
bool AArch64CallLowering::isEligibleForTailCallOptimization(
    MachineIRBuilder &MIRBuilder, CallLoweringInfo &Info,
    SmallVectorImpl<ArgInfo> &InArgs,
    SmallVectorImpl<ArgInfo> &OutArgs) const {
  CallingConv::ID CalleeCC = Info.CallConv;
  MachineFunction &MF = MIRBuilder.getMF();
  const Function &CallerF = MF.getFunction();

  LLVM_DEBUG(dbgs() << "Attempting to lower call as tail call\n");

  if (Info.SwiftErrorVReg) {
    // TODO: We should handle this.
    // Note that this is also handled by the check for no outgoing arguments.
    // Proactively disabling this though, because the swifterror handling in
    // lowerCall inserts a COPY *after* the location of the call.
    LLVM_DEBUG(dbgs() << "... Cannot handle tail calls with swifterror yet.\n");
    return false;
  }

  if (!mayTailCallThisCC(CalleeCC)) {
    LLVM_DEBUG(dbgs() << "... Calling convention cannot be tail called.\n");
    return false;
  }

  if (Info.IsVarArg) {
    LLVM_DEBUG(dbgs() << "... Tail calling varargs not supported yet.\n");
    return false;
  }

  // Byval parameters hand the function a pointer directly into the stack area
  // we want to reuse during a tail call. Working around this *is* possible (see
  // X86).
  //
  // FIXME: In AArch64ISelLowering, this isn't worked around. Can/should we try
  // it?
  //
  // On Windows, "inreg" attributes signify non-aggregate indirect returns.
  // In this case, it is necessary to save/restore X0 in the callee. Tail
  // call opt interferes with this. So we disable tail call opt when the
  // caller has an argument with "inreg" attribute.
  //
  // FIXME: Check whether the callee also has an "inreg" argument.
  //
  // When the caller has a swifterror argument, we don't want to tail call
  // because we would have to move into the swifterror register before the
  // tail call.
  if (any_of(CallerF.args(), [](const Argument &A) {
        return A.hasByValAttr() || A.hasInRegAttr() || A.hasSwiftErrorAttr();
      })) {
    LLVM_DEBUG(dbgs() << "... Cannot tail call from callers with byval, "
                         "inreg, or swifterror arguments\n");
    return false;
  }

  // Externally-defined functions with weak linkage should not be
  // tail-called on AArch64 when the OS does not support dynamic
  // pre-emption of symbols, as the AAELF spec requires normal calls
  // to undefined weak functions to be replaced with a NOP or jump to the
  // next instruction. The behaviour of branch instructions in this
  // situation (as used for tail calls) is implementation-defined, so we
  // cannot rely on the linker replacing the tail call with a return.
  if (Info.Callee.isGlobal()) {
    const GlobalValue *GV = Info.Callee.getGlobal();
    const Triple &TT = MF.getTarget().getTargetTriple();
    if (GV->hasExternalWeakLinkage() &&
        (!TT.isOSWindows() || TT.isOSBinFormatELF() ||
         TT.isOSBinFormatMachO())) {
      LLVM_DEBUG(dbgs() << "... Cannot tail call externally-defined function "
                           "with weak linkage for this OS.\n");
      return false;
    }
  }

  // If we have -tailcallopt and matching CCs, at this point, we could return
  // true. However, we don't have full tail call support yet. So, continue
  // checking. We want to emit a sibling call.

  // I want anyone implementing a new calling convention to think long and hard
  // about this assert.
  assert((!Info.IsVarArg || CalleeCC == CallingConv::C) &&
         "Unexpected variadic calling convention");

  // Verify that the incoming and outgoing arguments from the callee are
  // safe to tail call.
  if (!doCallerAndCalleePassArgsTheSameWay(Info, MF, InArgs)) {
    LLVM_DEBUG(
        dbgs()
        << "... Caller and callee have incompatible calling conventions.\n");
    return false;
  }

  if (!areCalleeOutgoingArgsTailCallable(Info, MF, OutArgs))
    return false;

  LLVM_DEBUG(
      dbgs() << "... Call is eligible for tail call optimization.\n");
  return true;
}

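/// Pick the machine opcode for the call: BL/BLR for normal calls, and the
/// TCRETURN pseudos for tail calls. With branch target enforcement (BTI),
/// indirect tail calls must use TCRETURNriBTI so that the branch goes
/// through x16 or x17.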
static unsigned getCallOpcode(const Function &CallerF, bool IsIndirect,
                              bool IsTailCall) {
  if (!IsTailCall)
    return IsIndirect ? AArch64::BLR : AArch64::BL;

  if (!IsIndirect)
    return AArch64::TCRETURNdi;

  // When BTI is enabled, we need to use TCRETURNriBTI to make sure that we use
  // x16 or x17.
  if (CallerF.hasFnAttribute("branch-target-enforcement"))
    return AArch64::TCRETURNriBTI;

  return AArch64::TCRETURNri;
}

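/// Lower an outgoing call: split the arguments and return value into legal
/// pieces, decide whether the call can be emitted as a sibling call, marshal
/// the arguments into their assigned registers and stack slots, and wrap the
/// sequence in ADJCALLSTACKDOWN/ADJCALLSTACKUP for normal calls.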
bool AArch64CallLowering::lowerCall(MachineIRBuilder &MIRBuilder,
                                    CallLoweringInfo &Info) const {
  MachineFunction &MF = MIRBuilder.getMF();
  const Function &F = MF.getFunction();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  auto &DL = F.getParent()->getDataLayout();
  const AArch64TargetLowering &TLI = *getTLI<AArch64TargetLowering>();

  if (Info.IsMustTailCall) {
    // TODO: Until we lower all tail calls, we should fall back on this.
    LLVM_DEBUG(dbgs() << "Cannot lower musttail calls yet.\n");
    return false;
  }

  if (Info.IsTailCall && MF.getTarget().Options.GuaranteedTailCallOpt) {
    // TODO: Until we lower all tail calls, we should fall back on this.
    LLVM_DEBUG(dbgs() << "Cannot handle -tailcallopt yet.\n");
    return false;
  }

  SmallVector<ArgInfo, 8> OutArgs;
  for (auto &OrigArg : Info.OrigArgs) {
    splitToValueTypes(OrigArg, OutArgs, DL, MRI, Info.CallConv);
    // AAPCS requires that we zero-extend i1 to 8 bits by the caller.
    if (OrigArg.Ty->isIntegerTy(1))
      OutArgs.back().Flags[0].setZExt();
  }

  SmallVector<ArgInfo, 8> InArgs;
  if (!Info.OrigRet.Ty->isVoidTy())
    splitToValueTypes(Info.OrigRet, InArgs, DL, MRI, F.getCallingConv());

  bool IsSibCall = Info.IsTailCall && isEligibleForTailCallOptimization(
                                          MIRBuilder, Info, InArgs, OutArgs);
  if (IsSibCall)
    MF.getFrameInfo().setHasTailCall();

  // Find out which ABI gets to decide where things go.
  CCAssignFn *AssignFnFixed =
      TLI.CCAssignFnForCall(Info.CallConv, /*IsVarArg=*/false);
  CCAssignFn *AssignFnVarArg =
      TLI.CCAssignFnForCall(Info.CallConv, /*IsVarArg=*/true);

  // If we have a sibling call, then we don't have to adjust the stack.
  // Otherwise, we need to adjust it.
  MachineInstrBuilder CallSeqStart;
  if (!IsSibCall)
    CallSeqStart = MIRBuilder.buildInstr(AArch64::ADJCALLSTACKDOWN);

  // Create a temporarily-floating call instruction so we can add the implicit
  // uses of arg registers.
  unsigned Opc = getCallOpcode(F, Info.Callee.isReg(), IsSibCall);

  // TODO: Right now, regbankselect doesn't know how to handle the rtcGPR64
  // register class. Until we can do that, we should fall back here.
  if (Opc == AArch64::TCRETURNriBTI) {
    LLVM_DEBUG(
        dbgs() << "Cannot lower indirect tail calls with BTI enabled yet.\n");
    return false;
  }

  auto MIB = MIRBuilder.buildInstrNoInsert(Opc);
  MIB.add(Info.Callee);

  // Add the byte offset for the tail call. We only have sibling calls, so this
  // is always 0.
  // TODO: Handle tail calls where we will have a different value here.
  if (IsSibCall)
    MIB.addImm(0);

  // Tell the call which registers are clobbered.
  auto TRI = MF.getSubtarget<AArch64Subtarget>().getRegisterInfo();
  const uint32_t *Mask = TRI->getCallPreservedMask(MF, F.getCallingConv());
  if (MF.getSubtarget<AArch64Subtarget>().hasCustomCallingConv())
    TRI->UpdateCustomCallPreservedMask(MF, &Mask);
  MIB.addRegMask(Mask);

  if (TRI->isAnyArgRegReserved(MF))
    TRI->emitReservedArgRegCallError(MF);

  // Do the actual argument marshalling.
  SmallVector<unsigned, 8> PhysRegs;
  OutgoingArgHandler Handler(MIRBuilder, MRI, MIB, AssignFnFixed,
                             AssignFnVarArg, IsSibCall);
  if (!handleAssignments(MIRBuilder, OutArgs, Handler))
    return false;

  // Now we can add the actual call instruction to the correct basic block.
  MIRBuilder.insertInstr(MIB);

  // If Callee is a reg, since it is used by a target specific
  // instruction, it must have a register class matching the
  // constraint of that instruction.
  if (Info.Callee.isReg())
    MIB->getOperand(0).setReg(constrainOperandRegClass(
        MF, *TRI, MRI, *MF.getSubtarget().getInstrInfo(),
        *MF.getSubtarget().getRegBankInfo(), *MIB, MIB->getDesc(), Info.Callee,
        0));

  // If we're tail calling, then we're the return from the block. So, we don't
  // want to copy anything.
  if (IsSibCall)
    return true;

  // Finally we can copy the returned value back into its virtual-register. In
  // symmetry with the arguments, the physical register must be an
  // implicit-define of the call instruction.
  if (!Info.OrigRet.Ty->isVoidTy()) {
    CCAssignFn *RetAssignFn = TLI.CCAssignFnForReturn(F.getCallingConv());
    CallReturnHandler Handler(MIRBuilder, MRI, MIB, RetAssignFn);
    if (!handleAssignments(MIRBuilder, InArgs, Handler))
      return false;
  }

  if (Info.SwiftErrorVReg) {
    MIB.addDef(AArch64::X21, RegState::Implicit);
    MIRBuilder.buildCopy(Info.SwiftErrorVReg, Register(AArch64::X21));
  }

  CallSeqStart.addImm(Handler.StackSize).addImm(0);
  MIRBuilder.buildInstr(AArch64::ADJCALLSTACKUP)
      .addImm(Handler.StackSize)
      .addImm(0);

  return true;
}