//===--- AArch64CallLowering.cpp - Call lowering --------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements the lowering of LLVM calls to machine code calls for
/// GlobalISel.
///
//===----------------------------------------------------------------------===//

#include "AArch64CallLowering.h"
#include "AArch64ISelLowering.h"
#include "AArch64MachineFunctionInfo.h"
#include "AArch64Subtarget.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/Analysis.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/GlobalISel/Utils.h"
#include "llvm/CodeGen/LowLevelType.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/CodeGen/ValueTypes.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/MachineValueType.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>

#define DEBUG_TYPE "aarch64-call-lowering"

using namespace llvm;

AArch64CallLowering::AArch64CallLowering(const AArch64TargetLowering &TLI)
    : CallLowering(&TLI) {}

namespace {
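/// Base handler for values flowing into the current function, either as
/// formal arguments or as the results of a call it makes. Assigned locations
/// are copied or loaded into the expected virtual registers, and StackUsed
/// tracks the extent of the incoming stack area. Subclasses decide how an
/// assigned physical register is marked as used (see markPhysRegUsed).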
struct IncomingArgHandler : public CallLowering::ValueHandler {
  IncomingArgHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                     CCAssignFn *AssignFn)
      : ValueHandler(MIRBuilder, MRI, AssignFn), StackUsed(0) {}

  unsigned getStackAddress(uint64_t Size, int64_t Offset,
                           MachinePointerInfo &MPO) override {
    auto &MFI = MIRBuilder.getMF().getFrameInfo();
    int FI = MFI.CreateFixedObject(Size, Offset, true);
    MPO = MachinePointerInfo::getFixedStack(MIRBuilder.getMF(), FI);
    unsigned AddrReg = MRI.createGenericVirtualRegister(LLT::pointer(0, 64));
    MIRBuilder.buildFrameIndex(AddrReg, FI);
    StackUsed = std::max(StackUsed, Size + Offset);
    return AddrReg;
  }

  void assignValueToReg(unsigned ValVReg, unsigned PhysReg,
                        CCValAssign &VA) override {
    markPhysRegUsed(PhysReg);
    switch (VA.getLocInfo()) {
    default:
      MIRBuilder.buildCopy(ValVReg, PhysReg);
      break;
    case CCValAssign::LocInfo::SExt:
    case CCValAssign::LocInfo::ZExt:
    case CCValAssign::LocInfo::AExt: {
      auto Copy = MIRBuilder.buildCopy(LLT{VA.getLocVT()}, PhysReg);
      MIRBuilder.buildTrunc(ValVReg, Copy);
      break;
    }
    }
  }

  void assignValueToAddress(unsigned ValVReg, unsigned Addr, uint64_t Size,
                            MachinePointerInfo &MPO, CCValAssign &VA) override {
    // FIXME: Get alignment
    auto MMO = MIRBuilder.getMF().getMachineMemOperand(
        MPO, MachineMemOperand::MOLoad | MachineMemOperand::MOInvariant, Size,
        1);
    MIRBuilder.buildLoad(ValVReg, Addr, *MMO);
  }

  /// How the physical register gets marked varies between formal
  /// parameters (it's a basic-block live-in), and a call instruction
  /// (it's an implicit-def of the BL).
  virtual void markPhysRegUsed(unsigned PhysReg) = 0;

  bool isArgumentHandler() const override { return true; }

  uint64_t StackUsed;
};

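/// Incoming-value handler for formal arguments: each assigned physical
/// register is recorded as a live-in of the current basic block.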
struct FormalArgHandler : public IncomingArgHandler {
  FormalArgHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                   CCAssignFn *AssignFn)
      : IncomingArgHandler(MIRBuilder, MRI, AssignFn) {}

  void markPhysRegUsed(unsigned PhysReg) override {
    MIRBuilder.getMBB().addLiveIn(PhysReg);
  }
};

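/// Incoming-value handler for values returned by a call: each assigned
/// physical register is added as an implicit def of the call instruction MIB.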
struct CallReturnHandler : public IncomingArgHandler {
  CallReturnHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                    MachineInstrBuilder MIB, CCAssignFn *AssignFn)
      : IncomingArgHandler(MIRBuilder, MRI, AssignFn), MIB(MIB) {}

  void markPhysRegUsed(unsigned PhysReg) override {
    MIB.addDef(PhysReg, RegState::Implicit);
  }

  MachineInstrBuilder MIB;
};

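/// Handler for outgoing values (call arguments and return values): copies
/// each value into its assigned physical register or stores it to the stack
/// relative to SP, chooses the fixed or vararg assignment function per
/// argument, and tracks the outgoing stack size in StackSize.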
struct OutgoingArgHandler : public CallLowering::ValueHandler {
  OutgoingArgHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                     MachineInstrBuilder MIB, CCAssignFn *AssignFn,
                     CCAssignFn *AssignFnVarArg)
      : ValueHandler(MIRBuilder, MRI, AssignFn), MIB(MIB),
        AssignFnVarArg(AssignFnVarArg), StackSize(0) {}

  unsigned getStackAddress(uint64_t Size, int64_t Offset,
                           MachinePointerInfo &MPO) override {
    LLT p0 = LLT::pointer(0, 64);
    LLT s64 = LLT::scalar(64);
    unsigned SPReg = MRI.createGenericVirtualRegister(p0);
    MIRBuilder.buildCopy(SPReg, AArch64::SP);

    unsigned OffsetReg = MRI.createGenericVirtualRegister(s64);
    MIRBuilder.buildConstant(OffsetReg, Offset);

    unsigned AddrReg = MRI.createGenericVirtualRegister(p0);
    MIRBuilder.buildGEP(AddrReg, SPReg, OffsetReg);

    MPO = MachinePointerInfo::getStack(MIRBuilder.getMF(), Offset);
    return AddrReg;
  }

  void assignValueToReg(unsigned ValVReg, unsigned PhysReg,
                        CCValAssign &VA) override {
    MIB.addUse(PhysReg, RegState::Implicit);
    unsigned ExtReg = extendRegister(ValVReg, VA);
    MIRBuilder.buildCopy(PhysReg, ExtReg);
  }

  void assignValueToAddress(unsigned ValVReg, unsigned Addr, uint64_t Size,
                            MachinePointerInfo &MPO, CCValAssign &VA) override {
    if (VA.getLocInfo() == CCValAssign::LocInfo::AExt) {
      Size = VA.getLocVT().getSizeInBits() / 8;
      ValVReg = MIRBuilder.buildAnyExt(LLT::scalar(Size * 8), ValVReg)
                    ->getOperand(0)
                    .getReg();
    }
    auto MMO = MIRBuilder.getMF().getMachineMemOperand(
        MPO, MachineMemOperand::MOStore, Size, 1);
    MIRBuilder.buildStore(ValVReg, Addr, *MMO);
  }

  bool assignArg(unsigned ValNo, MVT ValVT, MVT LocVT,
                 CCValAssign::LocInfo LocInfo,
                 const CallLowering::ArgInfo &Info,
                 CCState &State) override {
    bool Res;
    if (Info.IsFixed)
      Res = AssignFn(ValNo, ValVT, LocVT, LocInfo, Info.Flags, State);
    else
      Res = AssignFnVarArg(ValNo, ValVT, LocVT, LocInfo, Info.Flags, State);

    StackSize = State.getNextStackOffset();
    return Res;
  }

  MachineInstrBuilder MIB;
  CCAssignFn *AssignFnVarArg;
  uint64_t StackSize;
};
} // namespace

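/// Break OrigArg down into the value types the calling convention operates on
/// (e.g. an aggregate becomes one ArgInfo per leaf member, and [1 x double]
/// becomes a plain double), appending the pieces to SplitArgs. When a value is
/// actually split, PerformArgSplit is invoked with each new virtual register
/// and its offset in bits so the caller can connect the pieces back to the
/// original value.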
void AArch64CallLowering::splitToValueTypes(
    const ArgInfo &OrigArg, SmallVectorImpl<ArgInfo> &SplitArgs,
    const DataLayout &DL, MachineRegisterInfo &MRI, CallingConv::ID CallConv,
    const SplitArgTy &PerformArgSplit) const {
  const AArch64TargetLowering &TLI = *getTLI<AArch64TargetLowering>();
  LLVMContext &Ctx = OrigArg.Ty->getContext();

  if (OrigArg.Ty->isVoidTy())
    return;

  SmallVector<EVT, 4> SplitVTs;
  SmallVector<uint64_t, 4> Offsets;
  ComputeValueVTs(TLI, DL, OrigArg.Ty, SplitVTs, &Offsets, 0);

  if (SplitVTs.size() == 1) {
    // No splitting to do, but we want to replace the original type (e.g. [1 x
    // double] -> double).
    SplitArgs.emplace_back(OrigArg.Reg, SplitVTs[0].getTypeForEVT(Ctx),
                           OrigArg.Flags, OrigArg.IsFixed);
    return;
  }

  unsigned FirstRegIdx = SplitArgs.size();
  bool NeedsRegBlock = TLI.functionArgumentNeedsConsecutiveRegisters(
      OrigArg.Ty, CallConv, false);
  for (auto SplitVT : SplitVTs) {
    Type *SplitTy = SplitVT.getTypeForEVT(Ctx);
    SplitArgs.push_back(
        ArgInfo{MRI.createGenericVirtualRegister(getLLTForType(*SplitTy, DL)),
                SplitTy, OrigArg.Flags, OrigArg.IsFixed});
    if (NeedsRegBlock)
      SplitArgs.back().Flags.setInConsecutiveRegs();
  }

  SplitArgs.back().Flags.setInConsecutiveRegsLast();

  for (unsigned i = 0; i < Offsets.size(); ++i)
    PerformArgSplit(SplitArgs[FirstRegIdx + i].Reg, Offsets[i] * 8);
}

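/// Lower a return: split the returned value into legal pieces, extend or pad
/// each piece as the calling convention requires, copy the pieces into the
/// return registers, and emit RET_ReallyLR. A swifterror value, if present,
/// is returned in X21.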
bool AArch64CallLowering::lowerReturn(MachineIRBuilder &MIRBuilder,
                                      const Value *Val,
                                      ArrayRef<Register> VRegs,
                                      Register SwiftErrorVReg) const {
  auto MIB = MIRBuilder.buildInstrNoInsert(AArch64::RET_ReallyLR);
  assert(((Val && !VRegs.empty()) || (!Val && VRegs.empty())) &&
         "Return value without a vreg");

  bool Success = true;
  if (!VRegs.empty()) {
    MachineFunction &MF = MIRBuilder.getMF();
    const Function &F = MF.getFunction();

    MachineRegisterInfo &MRI = MF.getRegInfo();
    const AArch64TargetLowering &TLI = *getTLI<AArch64TargetLowering>();
    CCAssignFn *AssignFn = TLI.CCAssignFnForReturn(F.getCallingConv());
    auto &DL = F.getParent()->getDataLayout();
    LLVMContext &Ctx = Val->getType()->getContext();

    SmallVector<EVT, 4> SplitEVTs;
    ComputeValueVTs(TLI, DL, Val->getType(), SplitEVTs);
    assert(VRegs.size() == SplitEVTs.size() &&
           "For each split Type there should be exactly one VReg.");

    SmallVector<ArgInfo, 8> SplitArgs;
    CallingConv::ID CC = F.getCallingConv();

    for (unsigned i = 0; i < SplitEVTs.size(); ++i) {
      if (TLI.getNumRegistersForCallingConv(Ctx, CC, SplitEVTs[i]) > 1) {
        LLVM_DEBUG(dbgs() << "Can't handle extended arg types which need split");
        return false;
      }

      unsigned CurVReg = VRegs[i];
      ArgInfo CurArgInfo = ArgInfo{CurVReg, SplitEVTs[i].getTypeForEVT(Ctx)};
      setArgFlags(CurArgInfo, AttributeList::ReturnIndex, DL, F);

      // i1 is a special case because SDAG i1 true is naturally zero extended
      // when widened using ANYEXT. We need to do it explicitly here.
      if (MRI.getType(CurVReg).getSizeInBits() == 1) {
        CurVReg = MIRBuilder.buildZExt(LLT::scalar(8), CurVReg).getReg(0);
      } else {
        // Some types will need extending as specified by the CC.
        MVT NewVT = TLI.getRegisterTypeForCallingConv(Ctx, CC, SplitEVTs[i]);
        if (EVT(NewVT) != SplitEVTs[i]) {
          unsigned ExtendOp = TargetOpcode::G_ANYEXT;
          if (F.getAttributes().hasAttribute(AttributeList::ReturnIndex,
                                             Attribute::SExt))
            ExtendOp = TargetOpcode::G_SEXT;
          else if (F.getAttributes().hasAttribute(AttributeList::ReturnIndex,
                                                  Attribute::ZExt))
            ExtendOp = TargetOpcode::G_ZEXT;

          LLT NewLLT(NewVT);
          LLT OldLLT(MVT::getVT(CurArgInfo.Ty));
          CurArgInfo.Ty = EVT(NewVT).getTypeForEVT(Ctx);
          // Instead of an extend, we might have a vector type which needs
          // padding with more elements, e.g. <2 x half> -> <4 x half>.
          if (NewVT.isVector()) {
            if (OldLLT.isVector()) {
              if (NewLLT.getNumElements() > OldLLT.getNumElements()) {
                // We don't handle VA types which are not exactly twice the
                // size, but can easily be done in future.
                if (NewLLT.getNumElements() != OldLLT.getNumElements() * 2) {
                  LLVM_DEBUG(dbgs() << "Outgoing vector ret has too many elts");
                  return false;
                }
                auto Undef = MIRBuilder.buildUndef({OldLLT});
                CurVReg =
                    MIRBuilder.buildMerge({NewLLT}, {CurVReg, Undef.getReg(0)})
                        .getReg(0);
              } else {
                // Just do a vector extend.
                CurVReg = MIRBuilder.buildInstr(ExtendOp, {NewLLT}, {CurVReg})
                              .getReg(0);
              }
            } else if (NewLLT.getNumElements() == 2) {
              // We need to pad a <1 x S> type to <2 x S>. Since we don't have
              // <1 x S> vector types in GISel we use a build_vector instead
              // of a vector merge/concat.
              auto Undef = MIRBuilder.buildUndef({OldLLT});
              CurVReg =
                  MIRBuilder
                      .buildBuildVector({NewLLT}, {CurVReg, Undef.getReg(0)})
                      .getReg(0);
            } else {
              LLVM_DEBUG(dbgs() << "Could not handle ret ty");
              return false;
            }
          } else {
            // A scalar extend.
            CurVReg =
                MIRBuilder.buildInstr(ExtendOp, {NewLLT}, {CurVReg}).getReg(0);
          }
        }
      }
      if (CurVReg != CurArgInfo.Reg) {
        CurArgInfo.Reg = CurVReg;
        // Reset the arg flags after modifying CurVReg.
        setArgFlags(CurArgInfo, AttributeList::ReturnIndex, DL, F);
      }
      splitToValueTypes(CurArgInfo, SplitArgs, DL, MRI, CC,
                        [&](unsigned Reg, uint64_t Offset) {
                          MIRBuilder.buildExtract(Reg, CurVReg, Offset);
                        });
    }

    OutgoingArgHandler Handler(MIRBuilder, MRI, MIB, AssignFn, AssignFn);
    Success = handleAssignments(MIRBuilder, SplitArgs, Handler);
  }

  if (SwiftErrorVReg) {
    MIB.addUse(AArch64::X21, RegState::Implicit);
    MIRBuilder.buildCopy(AArch64::X21, SwiftErrorVReg);
  }

  MIRBuilder.insertInstr(MIB);
  return Success;
}

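/// Lower the incoming formal arguments: rebuild each argument's virtual
/// register from its split pieces, run the AArch64 calling convention to
/// assign registers and stack slots, and record where the varargs area starts
/// (only the Darwin varargs convention is supported so far).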
bool AArch64CallLowering::lowerFormalArguments(MachineIRBuilder &MIRBuilder,
                                               const Function &F,
                                               ArrayRef<Register> VRegs) const {
  MachineFunction &MF = MIRBuilder.getMF();
  MachineBasicBlock &MBB = MIRBuilder.getMBB();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  auto &DL = F.getParent()->getDataLayout();

  SmallVector<ArgInfo, 8> SplitArgs;
  unsigned i = 0;
  for (auto &Arg : F.args()) {
    if (DL.getTypeStoreSize(Arg.getType()) == 0)
      continue;
    ArgInfo OrigArg{VRegs[i], Arg.getType()};
    setArgFlags(OrigArg, i + AttributeList::FirstArgIndex, DL, F);
    bool Split = false;
    LLT Ty = MRI.getType(VRegs[i]);
    unsigned Dst = VRegs[i];

    splitToValueTypes(OrigArg, SplitArgs, DL, MRI, F.getCallingConv(),
                      [&](unsigned Reg, uint64_t Offset) {
                        if (!Split) {
                          Split = true;
                          Dst = MRI.createGenericVirtualRegister(Ty);
                          MIRBuilder.buildUndef(Dst);
                        }
                        unsigned Tmp = MRI.createGenericVirtualRegister(Ty);
                        MIRBuilder.buildInsert(Tmp, Dst, Reg, Offset);
                        Dst = Tmp;
                      });

    if (Dst != VRegs[i])
      MIRBuilder.buildCopy(VRegs[i], Dst);
    ++i;
  }

  if (!MBB.empty())
    MIRBuilder.setInstr(*MBB.begin());

  const AArch64TargetLowering &TLI = *getTLI<AArch64TargetLowering>();
  CCAssignFn *AssignFn =
      TLI.CCAssignFnForCall(F.getCallingConv(), /*IsVarArg=*/false);

  FormalArgHandler Handler(MIRBuilder, MRI, AssignFn);
  if (!handleAssignments(MIRBuilder, SplitArgs, Handler))
    return false;

  if (F.isVarArg()) {
    if (!MF.getSubtarget<AArch64Subtarget>().isTargetDarwin()) {
      // FIXME: we need to reimplement saveVarArgsRegisters from
      // AArch64ISelLowering.
      return false;
    }

    // We currently pass all varargs at 8-byte alignment.
    uint64_t StackOffset = alignTo(Handler.StackUsed, 8);

    auto &MFI = MIRBuilder.getMF().getFrameInfo();
    AArch64FunctionInfo *FuncInfo = MF.getInfo<AArch64FunctionInfo>();
    FuncInfo->setVarArgsStackIndex(MFI.CreateFixedObject(4, StackOffset, true));
  }

  auto &Subtarget = MF.getSubtarget<AArch64Subtarget>();
  if (Subtarget.hasCustomCallingConv())
    Subtarget.getRegisterInfo()->UpdateCustomCalleeSavedRegs(MF);

  // Move back to the end of the basic block.
  MIRBuilder.setMBB(MBB);

  return true;
}

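/// Lower an outgoing call: split and marshal the arguments according to the
/// fixed/vararg calling conventions, build the BL/BLR with its implicit
/// register uses, regmask and ADJCALLSTACKDOWN/UP bracket, then copy any
/// returned value (and a swifterror result from X21) back into virtual
/// registers.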
bool AArch64CallLowering::lowerCall(MachineIRBuilder &MIRBuilder,
                                    CallingConv::ID CallConv,
                                    const MachineOperand &Callee,
                                    const ArgInfo &OrigRet,
                                    ArrayRef<ArgInfo> OrigArgs,
                                    Register SwiftErrorVReg) const {
  MachineFunction &MF = MIRBuilder.getMF();
  const Function &F = MF.getFunction();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  auto &DL = F.getParent()->getDataLayout();

  SmallVector<ArgInfo, 8> SplitArgs;
  for (auto &OrigArg : OrigArgs) {
    splitToValueTypes(OrigArg, SplitArgs, DL, MRI, CallConv,
                      [&](unsigned Reg, uint64_t Offset) {
                        MIRBuilder.buildExtract(Reg, OrigArg.Reg, Offset);
                      });
    // AAPCS requires that we zero-extend i1 to 8 bits by the caller.
    if (OrigArg.Ty->isIntegerTy(1))
      SplitArgs.back().Flags.setZExt();
  }

  // Find out which ABI gets to decide where things go.
  const AArch64TargetLowering &TLI = *getTLI<AArch64TargetLowering>();
  CCAssignFn *AssignFnFixed =
      TLI.CCAssignFnForCall(CallConv, /*IsVarArg=*/false);
  CCAssignFn *AssignFnVarArg =
      TLI.CCAssignFnForCall(CallConv, /*IsVarArg=*/true);

  auto CallSeqStart = MIRBuilder.buildInstr(AArch64::ADJCALLSTACKDOWN);

  // Create a temporarily-floating call instruction so we can add the implicit
  // uses of arg registers.
  auto MIB = MIRBuilder.buildInstrNoInsert(Callee.isReg() ? AArch64::BLR
                                                          : AArch64::BL);
  MIB.add(Callee);

  // Tell the call which registers are clobbered.
  auto TRI = MF.getSubtarget<AArch64Subtarget>().getRegisterInfo();
  const uint32_t *Mask = TRI->getCallPreservedMask(MF, F.getCallingConv());
  if (MF.getSubtarget<AArch64Subtarget>().hasCustomCallingConv())
    TRI->UpdateCustomCallPreservedMask(MF, &Mask);
  MIB.addRegMask(Mask);

  if (TRI->isAnyArgRegReserved(MF))
    TRI->emitReservedArgRegCallError(MF);

  // Do the actual argument marshalling.
  SmallVector<unsigned, 8> PhysRegs;
  OutgoingArgHandler Handler(MIRBuilder, MRI, MIB, AssignFnFixed,
                             AssignFnVarArg);
  if (!handleAssignments(MIRBuilder, SplitArgs, Handler))
    return false;

  // Now we can add the actual call instruction to the correct basic block.
  MIRBuilder.insertInstr(MIB);

  // If Callee is a reg, since it is used by a target specific
  // instruction, it must have a register class matching the
  // constraint of that instruction.
  if (Callee.isReg())
    MIB->getOperand(0).setReg(constrainOperandRegClass(
        MF, *TRI, MRI, *MF.getSubtarget().getInstrInfo(),
        *MF.getSubtarget().getRegBankInfo(), *MIB, MIB->getDesc(), Callee, 0));

  // Finally we can copy the returned value back into its virtual-register. In
  // symmetry with the arguments, the physical register must be an
  // implicit-def of the call instruction.
  CCAssignFn *RetAssignFn = TLI.CCAssignFnForReturn(F.getCallingConv());
  if (OrigRet.Reg) {
    SplitArgs.clear();

    SmallVector<uint64_t, 8> RegOffsets;
    SmallVector<Register, 8> SplitRegs;
    splitToValueTypes(OrigRet, SplitArgs, DL, MRI, F.getCallingConv(),
                      [&](unsigned Reg, uint64_t Offset) {
                        RegOffsets.push_back(Offset);
                        SplitRegs.push_back(Reg);
                      });

    CallReturnHandler Handler(MIRBuilder, MRI, MIB, RetAssignFn);
    if (!handleAssignments(MIRBuilder, SplitArgs, Handler))
      return false;

    if (!RegOffsets.empty())
      MIRBuilder.buildSequence(OrigRet.Reg, SplitRegs, RegOffsets);
  }

  if (SwiftErrorVReg) {
    MIB.addDef(AArch64::X21, RegState::Implicit);
    MIRBuilder.buildCopy(SwiftErrorVReg, AArch64::X21);
  }

  CallSeqStart.addImm(Handler.StackSize).addImm(0);
  MIRBuilder.buildInstr(AArch64::ADJCALLSTACKUP)
      .addImm(Handler.StackSize)
      .addImm(0);

  return true;
}