| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1 | //===- AArch64FrameLowering.cpp - AArch64 Frame Lowering -------*- C++ -*-====// | 
|  | 2 | // | 
|  | 3 | //                     The LLVM Compiler Infrastructure | 
|  | 4 | // | 
|  | 5 | // This file is distributed under the University of Illinois Open Source | 
|  | 6 | // License. See LICENSE.TXT for details. | 
|  | 7 | // | 
|  | 8 | //===----------------------------------------------------------------------===// | 
|  | 9 | // | 
|  | 10 | // This file contains the AArch64 implementation of TargetFrameLowering class. | 
|  | 11 | // | 
| Kristof Beyls | 17cb898 | 2015-04-09 08:49:47 +0000 | [diff] [blame] | 12 | // On AArch64, stack frames are structured as follows: | 
|  | 13 | // | 
|  | 14 | // The stack grows downward. | 
|  | 15 | // | 
|  | 16 | // All of the individual frame areas on the frame below are optional, i.e. it's | 
|  | 17 | // possible to create a function so that the particular area isn't present | 
|  | 18 | // in the frame. | 
|  | 19 | // | 
|  | 20 | // At function entry, the "frame" looks as follows: | 
|  | 21 | // | 
|  | 22 | // |                                   | Higher address | 
|  | 23 | // |-----------------------------------| | 
|  | 24 | // |                                   | | 
|  | 25 | // | arguments passed on the stack     | | 
|  | 26 | // |                                   | | 
|  | 27 | // |-----------------------------------| <- sp | 
|  | 28 | // |                                   | Lower address | 
|  | 29 | // | 
|  | 30 | // | 
|  | 31 | // After the prologue has run, the frame has the following general structure. | 
|  | 32 | // Note that this doesn't depict the case where a red-zone is used. Also, | 
|  | 33 | // technically the last frame area (VLAs) doesn't get created until in the | 
|  | 34 | // main function body, after the prologue is run. However, it's depicted here | 
|  | 35 | // for completeness. | 
|  | 36 | // | 
|  | 37 | // |                                   | Higher address | 
|  | 38 | // |-----------------------------------| | 
|  | 39 | // |                                   | | 
|  | 40 | // | arguments passed on the stack     | | 
|  | 41 | // |                                   | | 
|  | 42 | // |-----------------------------------| | 
|  | 43 | // |                                   | | 
| Martin Storsjo | 68266fa | 2017-07-13 17:03:12 +0000 | [diff] [blame] | 44 | // | (Win64 only) varargs from reg     | | 
|  | 45 | // |                                   | | 
|  | 46 | // |-----------------------------------| | 
|  | 47 | // |                                   | | 
| Kristof Beyls | 17cb898 | 2015-04-09 08:49:47 +0000 | [diff] [blame] | 48 | // | prev_fp, prev_lr                  | | 
|  | 49 | // | (a.k.a. "frame record")           | | 
|  | 50 | // |-----------------------------------| <- fp(=x29) | 
|  | 51 | // |                                   | | 
|  | 52 | // | other callee-saved registers      | | 
|  | 53 | // |                                   | | 
|  | 54 | // |-----------------------------------| | 
|  | 55 | // |.empty.space.to.make.part.below....| | 
|  | 56 | // |.aligned.in.case.it.needs.more.than| (size of this area is unknown at | 
|  | 57 | // |.the.standard.16-byte.alignment....|  compile time; if present) | 
|  | 58 | // |-----------------------------------| | 
|  | 59 | // |                                   | | 
|  | 60 | // | local variables of fixed size     | | 
|  | 61 | // | including spill slots             | | 
|  | 62 | // |-----------------------------------| <- bp(not defined by ABI, | 
|  | 63 | // |.variable-sized.local.variables....|       LLVM chooses X19) | 
|  | 64 | // |.(VLAs)............................| (size of this area is unknown at | 
|  | 65 | // |...................................|  compile time) | 
|  | 66 | // |-----------------------------------| <- sp | 
|  | 67 | // |                                   | Lower address | 
|  | 68 | // | 
|  | 69 | // | 
|  | 70 | // To access the data in a frame, at-compile time, a constant offset must be | 
|  | 71 | // computable from one of the pointers (fp, bp, sp) to access it. The size | 
|  | 72 | // of the areas with a dotted background cannot be computed at compile-time | 
|  | 73 | // if they are present, making it required to have all three of fp, bp and | 
|  | 74 | // sp to be set up to be able to access all contents in the frame areas, | 
|  | 75 | // assuming all of the frame areas are non-empty. | 
|  | 76 | // | 
|  | 77 | // For most functions, some of the frame areas are empty. For those functions, | 
|  | 78 | // it may not be necessary to set up fp or bp: | 
| Benjamin Kramer | df005cb | 2015-08-08 18:27:36 +0000 | [diff] [blame] | 79 | // * A base pointer is definitely needed when there are both VLAs and local | 
| Kristof Beyls | 17cb898 | 2015-04-09 08:49:47 +0000 | [diff] [blame] | 80 | //   variables with more-than-default alignment requirements. | 
| Benjamin Kramer | df005cb | 2015-08-08 18:27:36 +0000 | [diff] [blame] | 81 | // * A frame pointer is definitely needed when there are local variables with | 
| Kristof Beyls | 17cb898 | 2015-04-09 08:49:47 +0000 | [diff] [blame] | 82 | //   more-than-default alignment requirements. | 
|  | 83 | // | 
|  | 84 | // In some cases when a base pointer is not strictly needed, it is generated | 
|  | 85 | // anyway when offsets from the frame pointer to access local variables become | 
|  | 86 | // so large that the offset can't be encoded in the immediate fields of loads | 
|  | 87 | // or stores. | 
|  | 88 | // | 
|  | 89 | // FIXME: also explain the redzone concept. | 
|  | 90 | // FIXME: also explain the concept of reserved call frames. | 
|  | 91 | // | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 92 | //===----------------------------------------------------------------------===// | 
|  | 93 |  | 
|  | 94 | #include "AArch64FrameLowering.h" | 
|  | 95 | #include "AArch64InstrInfo.h" | 
|  | 96 | #include "AArch64MachineFunctionInfo.h" | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 97 | #include "AArch64RegisterInfo.h" | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 98 | #include "AArch64Subtarget.h" | 
|  | 99 | #include "AArch64TargetMachine.h" | 
| Martin Storsjo | 2778fd0 | 2017-12-20 06:51:45 +0000 | [diff] [blame] | 100 | #include "MCTargetDesc/AArch64AddressingModes.h" | 
| Luke Cheeseman | 64dcdec | 2018-08-17 12:53:22 +0000 | [diff] [blame] | 101 | #include "llvm/ADT/ScopeExit.h" | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 102 | #include "llvm/ADT/SmallVector.h" | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 103 | #include "llvm/ADT/Statistic.h" | 
| Matthias Braun | 332bb5c | 2016-07-06 21:31:27 +0000 | [diff] [blame] | 104 | #include "llvm/CodeGen/LivePhysRegs.h" | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 105 | #include "llvm/CodeGen/MachineBasicBlock.h" | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 106 | #include "llvm/CodeGen/MachineFrameInfo.h" | 
|  | 107 | #include "llvm/CodeGen/MachineFunction.h" | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 108 | #include "llvm/CodeGen/MachineInstr.h" | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 109 | #include "llvm/CodeGen/MachineInstrBuilder.h" | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 110 | #include "llvm/CodeGen/MachineMemOperand.h" | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 111 | #include "llvm/CodeGen/MachineModuleInfo.h" | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 112 | #include "llvm/CodeGen/MachineOperand.h" | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 113 | #include "llvm/CodeGen/MachineRegisterInfo.h" | 
|  | 114 | #include "llvm/CodeGen/RegisterScavenging.h" | 
| David Blaikie | 3f833ed | 2017-11-08 01:01:31 +0000 | [diff] [blame] | 115 | #include "llvm/CodeGen/TargetInstrInfo.h" | 
| David Blaikie | b3bde2e | 2017-11-17 01:07:10 +0000 | [diff] [blame] | 116 | #include "llvm/CodeGen/TargetRegisterInfo.h" | 
|  | 117 | #include "llvm/CodeGen/TargetSubtargetInfo.h" | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 118 | #include "llvm/IR/Attributes.h" | 
|  | 119 | #include "llvm/IR/CallingConv.h" | 
| Benjamin Kramer | 1f8930e | 2014-07-25 11:42:14 +0000 | [diff] [blame] | 120 | #include "llvm/IR/DataLayout.h" | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 121 | #include "llvm/IR/DebugLoc.h" | 
| Benjamin Kramer | 1f8930e | 2014-07-25 11:42:14 +0000 | [diff] [blame] | 122 | #include "llvm/IR/Function.h" | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 123 | #include "llvm/MC/MCDwarf.h" | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 124 | #include "llvm/Support/CommandLine.h" | 
| Benjamin Kramer | 1f8930e | 2014-07-25 11:42:14 +0000 | [diff] [blame] | 125 | #include "llvm/Support/Debug.h" | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 126 | #include "llvm/Support/ErrorHandling.h" | 
|  | 127 | #include "llvm/Support/MathExtras.h" | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 128 | #include "llvm/Support/raw_ostream.h" | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 129 | #include "llvm/Target/TargetMachine.h" | 
|  | 130 | #include "llvm/Target/TargetOptions.h" | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 131 | #include <cassert> | 
|  | 132 | #include <cstdint> | 
|  | 133 | #include <iterator> | 
|  | 134 | #include <vector> | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 135 |  | 
|  | 136 | using namespace llvm; | 
|  | 137 |  | 
|  | 138 | #define DEBUG_TYPE "frame-info" | 
|  | 139 |  | 
|  | 140 | static cl::opt<bool> EnableRedZone("aarch64-redzone", | 
|  | 141 | cl::desc("enable use of redzone on AArch64"), | 
|  | 142 | cl::init(false), cl::Hidden); | 
|  | 143 |  | 
| Francis Visoiu Mistrih | 164560b | 2018-03-14 20:34:03 +0000 | [diff] [blame] | 144 | static cl::opt<bool> | 
|  | 145 | ReverseCSRRestoreSeq("reverse-csr-restore-seq", | 
|  | 146 | cl::desc("reverse the CSR restore sequence"), | 
|  | 147 | cl::init(false), cl::Hidden); | 
|  | 148 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 149 | STATISTIC(NumRedZoneFunctions, "Number of functions using red zone"); | 
|  | 150 |  | 
| Matthias Braun | 5c290dc | 2018-01-19 03:16:36 +0000 | [diff] [blame] | 151 | /// This is the biggest offset to the stack pointer we can encode in aarch64 | 
|  | 152 | /// instructions (without using a separate calculation and a temp register). | 
|  | 153 | /// Note that the exception here are vector stores/loads which cannot encode any | 
|  | 154 | /// displacements (see estimateRSStackSizeLimit(), isAArch64FrameOffsetLegal()). | 
|  | 155 | static const unsigned DefaultSafeSPDisplacement = 255; | 
|  | 156 |  | 
| Kristof Beyls | 2af1e90 | 2017-05-30 06:58:41 +0000 | [diff] [blame] | 157 | /// Look at each instruction that references stack frames and return the stack | 
|  | 158 | /// size limit beyond which some of these instructions will require a scratch | 
|  | 159 | /// register during their expansion later. | 
|  | 160 | static unsigned estimateRSStackSizeLimit(MachineFunction &MF) { | 
|  | 161 | // FIXME: For now, just conservatively guestimate based on unscaled indexing | 
|  | 162 | // range. We'll end up allocating an unnecessary spill slot a lot, but | 
|  | 163 | // realistically that's not a big deal at this stage of the game. | 
|  | 164 | for (MachineBasicBlock &MBB : MF) { | 
|  | 165 | for (MachineInstr &MI : MBB) { | 
| Shiva Chen | 801bf7e | 2018-05-09 02:42:00 +0000 | [diff] [blame] | 166 | if (MI.isDebugInstr() || MI.isPseudo() || | 
| Kristof Beyls | 2af1e90 | 2017-05-30 06:58:41 +0000 | [diff] [blame] | 167 | MI.getOpcode() == AArch64::ADDXri || | 
|  | 168 | MI.getOpcode() == AArch64::ADDSXri) | 
|  | 169 | continue; | 
|  | 170 |  | 
| Javed Absar | d13d419 | 2017-10-30 22:00:06 +0000 | [diff] [blame] | 171 | for (const MachineOperand &MO : MI.operands()) { | 
|  | 172 | if (!MO.isFI()) | 
| Kristof Beyls | 2af1e90 | 2017-05-30 06:58:41 +0000 | [diff] [blame] | 173 | continue; | 
|  | 174 |  | 
|  | 175 | int Offset = 0; | 
|  | 176 | if (isAArch64FrameOffsetLegal(MI, Offset, nullptr, nullptr, nullptr) == | 
|  | 177 | AArch64FrameOffsetCannotUpdate) | 
|  | 178 | return 0; | 
|  | 179 | } | 
|  | 180 | } | 
|  | 181 | } | 
| Matthias Braun | 5c290dc | 2018-01-19 03:16:36 +0000 | [diff] [blame] | 182 | return DefaultSafeSPDisplacement; | 
| Kristof Beyls | 2af1e90 | 2017-05-30 06:58:41 +0000 | [diff] [blame] | 183 | } | 
|  | 184 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 185 | bool AArch64FrameLowering::canUseRedZone(const MachineFunction &MF) const { | 
|  | 186 | if (!EnableRedZone) | 
|  | 187 | return false; | 
|  | 188 | // Don't use the red zone if the function explicitly asks us not to. | 
|  | 189 | // This is typically used for kernel code. | 
| Matthias Braun | f1caa28 | 2017-12-15 22:22:58 +0000 | [diff] [blame] | 190 | if (MF.getFunction().hasFnAttribute(Attribute::NoRedZone)) | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 191 | return false; | 
|  | 192 |  | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 193 | const MachineFrameInfo &MFI = MF.getFrameInfo(); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 194 | const AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>(); | 
|  | 195 | unsigned NumBytes = AFI->getLocalStackSize(); | 
|  | 196 |  | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 197 | return !(MFI.hasCalls() || hasFP(MF) || NumBytes > 128); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 198 | } | 
|  | 199 |  | 
|  | 200 | /// hasFP - Return true if the specified function should have a dedicated frame | 
|  | 201 | /// pointer register. | 
|  | 202 | bool AArch64FrameLowering::hasFP(const MachineFunction &MF) const { | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 203 | const MachineFrameInfo &MFI = MF.getFrameInfo(); | 
| Eric Christopher | fc6de42 | 2014-08-05 02:39:49 +0000 | [diff] [blame] | 204 | const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo(); | 
| Geoff Berry | 62c1a1e | 2016-03-02 17:58:31 +0000 | [diff] [blame] | 205 | // Retain behavior of always omitting the FP for leaf functions when possible. | 
| Matthias Braun | 5c290dc | 2018-01-19 03:16:36 +0000 | [diff] [blame] | 206 | if (MFI.hasCalls() && MF.getTarget().Options.DisableFramePointerElim(MF)) | 
|  | 207 | return true; | 
|  | 208 | if (MFI.hasVarSizedObjects() || MFI.isFrameAddressTaken() || | 
|  | 209 | MFI.hasStackMap() || MFI.hasPatchPoint() || | 
|  | 210 | RegInfo->needsStackRealignment(MF)) | 
|  | 211 | return true; | 
|  | 212 | // With large callframes around we may need to use FP to access the scavenging | 
|  | 213 | // emergency spillslot. | 
|  | 214 | // | 
|  | 215 | // Unfortunately some calls to hasFP() like machine verifier -> | 
|  | 216 | // getReservedReg() -> hasFP in the middle of global isel are too early | 
|  | 217 | // to know the max call frame size. Hopefully conservatively returning "true" | 
|  | 218 | // in those cases is fine. | 
|  | 219 | // DefaultSafeSPDisplacement is fine as we only emergency spill GP regs. | 
|  | 220 | if (!MFI.isMaxCallFrameSizeComputed() || | 
|  | 221 | MFI.getMaxCallFrameSize() > DefaultSafeSPDisplacement) | 
|  | 222 | return true; | 
|  | 223 |  | 
|  | 224 | return false; | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 225 | } | 
|  | 226 |  | 
|  | 227 | /// hasReservedCallFrame - Under normal circumstances, when a frame pointer is | 
|  | 228 | /// not required, we reserve argument space for call sites in the function | 
|  | 229 | /// immediately on entry to the current function.  This eliminates the need for | 
|  | 230 | /// add/sub sp brackets around call sites.  Returns true if the call frame is | 
|  | 231 | /// included as part of the stack frame. | 
|  | 232 | bool | 
|  | 233 | AArch64FrameLowering::hasReservedCallFrame(const MachineFunction &MF) const { | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 234 | return !MF.getFrameInfo().hasVarSizedObjects(); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 235 | } | 
|  | 236 |  | 
| Hans Wennborg | e1a2e90 | 2016-03-31 18:33:38 +0000 | [diff] [blame] | 237 | MachineBasicBlock::iterator AArch64FrameLowering::eliminateCallFramePseudoInstr( | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 238 | MachineFunction &MF, MachineBasicBlock &MBB, | 
|  | 239 | MachineBasicBlock::iterator I) const { | 
| Eric Christopher | fc6de42 | 2014-08-05 02:39:49 +0000 | [diff] [blame] | 240 | const AArch64InstrInfo *TII = | 
|  | 241 | static_cast<const AArch64InstrInfo *>(MF.getSubtarget().getInstrInfo()); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 242 | DebugLoc DL = I->getDebugLoc(); | 
| Matthias Braun | fa3872e | 2015-05-18 20:27:55 +0000 | [diff] [blame] | 243 | unsigned Opc = I->getOpcode(); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 244 | bool IsDestroy = Opc == TII->getCallFrameDestroyOpcode(); | 
|  | 245 | uint64_t CalleePopAmount = IsDestroy ? I->getOperand(1).getImm() : 0; | 
|  | 246 |  | 
| Eric Christopher | fc6de42 | 2014-08-05 02:39:49 +0000 | [diff] [blame] | 247 | const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering(); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 248 | if (!TFI->hasReservedCallFrame(MF)) { | 
|  | 249 | unsigned Align = getStackAlignment(); | 
|  | 250 |  | 
|  | 251 | int64_t Amount = I->getOperand(0).getImm(); | 
| Rui Ueyama | da00f2f | 2016-01-14 21:06:47 +0000 | [diff] [blame] | 252 | Amount = alignTo(Amount, Align); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 253 | if (!IsDestroy) | 
|  | 254 | Amount = -Amount; | 
|  | 255 |  | 
|  | 256 | // N.b. if CalleePopAmount is valid but zero (i.e. callee would pop, but it | 
|  | 257 | // doesn't have to pop anything), then the first operand will be zero too so | 
|  | 258 | // this adjustment is a no-op. | 
|  | 259 | if (CalleePopAmount == 0) { | 
|  | 260 | // FIXME: in-function stack adjustment for calls is limited to 24-bits | 
|  | 261 | // because there's no guaranteed temporary register available. | 
|  | 262 | // | 
| Sylvestre Ledru | 469de19 | 2014-08-11 18:04:46 +0000 | [diff] [blame] | 263 | // ADD/SUB (immediate) has only LSL #0 and LSL #12 available. | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 264 | // 1) For offset <= 12-bit, we use LSL #0 | 
|  | 265 | // 2) For 12-bit <= offset <= 24-bit, we use two instructions. One uses | 
|  | 266 | // LSL #0, and the other uses LSL #12. | 
|  | 267 | // | 
| Chad Rosier | 401a4ab | 2016-01-19 16:50:45 +0000 | [diff] [blame] | 268 | // Most call frames will be allocated at the start of a function so | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 269 | // this is OK, but it is a limitation that needs dealing with. | 
|  | 270 | assert(Amount > -0xffffff && Amount < 0xffffff && "call frame too large"); | 
|  | 271 | emitFrameOffset(MBB, I, DL, AArch64::SP, AArch64::SP, Amount, TII); | 
|  | 272 | } | 
|  | 273 | } else if (CalleePopAmount != 0) { | 
|  | 274 | // If the calling convention demands that the callee pops arguments from the | 
|  | 275 | // stack, we want to add it back if we have a reserved call frame. | 
|  | 276 | assert(CalleePopAmount < 0xffffff && "call frame too large"); | 
|  | 277 | emitFrameOffset(MBB, I, DL, AArch64::SP, AArch64::SP, -CalleePopAmount, | 
|  | 278 | TII); | 
|  | 279 | } | 
| Hans Wennborg | e1a2e90 | 2016-03-31 18:33:38 +0000 | [diff] [blame] | 280 | return MBB.erase(I); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 281 | } | 
|  | 282 |  | 
| Luke Cheeseman | 64dcdec | 2018-08-17 12:53:22 +0000 | [diff] [blame] | 283 | static bool ShouldSignReturnAddress(MachineFunction &MF) { | 
|  | 284 | // The function should be signed in the following situations: | 
|  | 285 | // - sign-return-address=all | 
|  | 286 | // - sign-return-address=non-leaf and the functions spills the LR | 
|  | 287 |  | 
|  | 288 | const Function &F = MF.getFunction(); | 
|  | 289 | if (!F.hasFnAttribute("sign-return-address")) | 
|  | 290 | return false; | 
|  | 291 |  | 
|  | 292 | StringRef Scope = F.getFnAttribute("sign-return-address").getValueAsString(); | 
|  | 293 | if (Scope.equals("none")) | 
|  | 294 | return false; | 
|  | 295 |  | 
|  | 296 | if (Scope.equals("all")) | 
|  | 297 | return true; | 
|  | 298 |  | 
|  | 299 | assert(Scope.equals("non-leaf") && "Expected all, none or non-leaf"); | 
|  | 300 |  | 
|  | 301 | for (const auto &Info : MF.getFrameInfo().getCalleeSavedInfo()) | 
|  | 302 | if (Info.getReg() == AArch64::LR) | 
|  | 303 | return true; | 
|  | 304 |  | 
|  | 305 | return false; | 
|  | 306 | } | 
|  | 307 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 308 | void AArch64FrameLowering::emitCalleeSavedFrameMoves( | 
| Geoff Berry | 62d4725 | 2016-02-25 16:36:08 +0000 | [diff] [blame] | 309 | MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI) const { | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 310 | MachineFunction &MF = *MBB.getParent(); | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 311 | MachineFrameInfo &MFI = MF.getFrameInfo(); | 
| Matthias Braun | f23ef43 | 2016-11-30 23:48:42 +0000 | [diff] [blame] | 312 | const TargetSubtargetInfo &STI = MF.getSubtarget(); | 
|  | 313 | const MCRegisterInfo *MRI = STI.getRegisterInfo(); | 
|  | 314 | const TargetInstrInfo *TII = STI.getInstrInfo(); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 315 | DebugLoc DL = MBB.findDebugLoc(MBBI); | 
|  | 316 |  | 
|  | 317 | // Add callee saved registers to move list. | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 318 | const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo(); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 319 | if (CSI.empty()) | 
|  | 320 | return; | 
|  | 321 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 322 | for (const auto &Info : CSI) { | 
|  | 323 | unsigned Reg = Info.getReg(); | 
| Geoff Berry | 62d4725 | 2016-02-25 16:36:08 +0000 | [diff] [blame] | 324 | int64_t Offset = | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 325 | MFI.getObjectOffset(Info.getFrameIdx()) - getOffsetOfLocalArea(); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 326 | unsigned DwarfReg = MRI->getDwarfRegNum(Reg, true); | 
| Matthias Braun | f23ef43 | 2016-11-30 23:48:42 +0000 | [diff] [blame] | 327 | unsigned CFIIndex = MF.addFrameInst( | 
| Geoff Berry | 62d4725 | 2016-02-25 16:36:08 +0000 | [diff] [blame] | 328 | MCCFIInstruction::createOffset(nullptr, DwarfReg, Offset)); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 329 | BuildMI(MBB, MBBI, DL, TII->get(TargetOpcode::CFI_INSTRUCTION)) | 
| Adrian Prantl | b9fa945 | 2014-12-16 00:20:49 +0000 | [diff] [blame] | 330 | .addCFIIndex(CFIIndex) | 
|  | 331 | .setMIFlags(MachineInstr::FrameSetup); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 332 | } | 
|  | 333 | } | 
|  | 334 |  | 
| Geoff Berry | 7e4ba3d | 2016-02-19 18:27:32 +0000 | [diff] [blame] | 335 | // Find a scratch register that we can use at the start of the prologue to | 
|  | 336 | // re-align the stack pointer.  We avoid using callee-save registers since they | 
|  | 337 | // may appear to be free when this is called from canUseAsPrologue (during | 
|  | 338 | // shrink wrapping), but then no longer be free when this is called from | 
|  | 339 | // emitPrologue. | 
|  | 340 | // | 
|  | 341 | // FIXME: This is a bit conservative, since in the above case we could use one | 
|  | 342 | // of the callee-save registers as a scratch temp to re-align the stack pointer, | 
|  | 343 | // but we would then have to make sure that we were in fact saving at least one | 
|  | 344 | // callee-save register in the prologue, which is additional complexity that | 
|  | 345 | // doesn't seem worth the benefit. | 
|  | 346 | static unsigned findScratchNonCalleeSaveRegister(MachineBasicBlock *MBB) { | 
|  | 347 | MachineFunction *MF = MBB->getParent(); | 
|  | 348 |  | 
|  | 349 | // If MBB is an entry block, use X9 as the scratch register | 
|  | 350 | if (&MF->front() == MBB) | 
|  | 351 | return AArch64::X9; | 
|  | 352 |  | 
| Eric Christopher | 60a245e | 2017-03-31 23:12:27 +0000 | [diff] [blame] | 353 | const AArch64Subtarget &Subtarget = MF->getSubtarget<AArch64Subtarget>(); | 
| Matthias Braun | ac4307c | 2017-05-26 21:51:00 +0000 | [diff] [blame] | 354 | const AArch64RegisterInfo &TRI = *Subtarget.getRegisterInfo(); | 
| Eric Christopher | 60a245e | 2017-03-31 23:12:27 +0000 | [diff] [blame] | 355 | LivePhysRegs LiveRegs(TRI); | 
| Matthias Braun | 332bb5c | 2016-07-06 21:31:27 +0000 | [diff] [blame] | 356 | LiveRegs.addLiveIns(*MBB); | 
| Geoff Berry | 7e4ba3d | 2016-02-19 18:27:32 +0000 | [diff] [blame] | 357 |  | 
| Matthias Braun | 332bb5c | 2016-07-06 21:31:27 +0000 | [diff] [blame] | 358 | // Mark callee saved registers as used so we will not choose them. | 
| Matthias Braun | ac4307c | 2017-05-26 21:51:00 +0000 | [diff] [blame] | 359 | const MCPhysReg *CSRegs = TRI.getCalleeSavedRegs(MF); | 
| Geoff Berry | 7e4ba3d | 2016-02-19 18:27:32 +0000 | [diff] [blame] | 360 | for (unsigned i = 0; CSRegs[i]; ++i) | 
| Matthias Braun | 332bb5c | 2016-07-06 21:31:27 +0000 | [diff] [blame] | 361 | LiveRegs.addReg(CSRegs[i]); | 
| Geoff Berry | 7e4ba3d | 2016-02-19 18:27:32 +0000 | [diff] [blame] | 362 |  | 
| Matthias Braun | 332bb5c | 2016-07-06 21:31:27 +0000 | [diff] [blame] | 363 | // Prefer X9 since it was historically used for the prologue scratch reg. | 
|  | 364 | const MachineRegisterInfo &MRI = MF->getRegInfo(); | 
|  | 365 | if (LiveRegs.available(MRI, AArch64::X9)) | 
|  | 366 | return AArch64::X9; | 
| Geoff Berry | 7e4ba3d | 2016-02-19 18:27:32 +0000 | [diff] [blame] | 367 |  | 
| Matthias Braun | 332bb5c | 2016-07-06 21:31:27 +0000 | [diff] [blame] | 368 | for (unsigned Reg : AArch64::GPR64RegClass) { | 
|  | 369 | if (LiveRegs.available(MRI, Reg)) | 
|  | 370 | return Reg; | 
|  | 371 | } | 
| Geoff Berry | 7e4ba3d | 2016-02-19 18:27:32 +0000 | [diff] [blame] | 372 | return AArch64::NoRegister; | 
|  | 373 | } | 
|  | 374 |  | 
|  | 375 | bool AArch64FrameLowering::canUseAsPrologue( | 
|  | 376 | const MachineBasicBlock &MBB) const { | 
|  | 377 | const MachineFunction *MF = MBB.getParent(); | 
|  | 378 | MachineBasicBlock *TmpMBB = const_cast<MachineBasicBlock *>(&MBB); | 
|  | 379 | const AArch64Subtarget &Subtarget = MF->getSubtarget<AArch64Subtarget>(); | 
|  | 380 | const AArch64RegisterInfo *RegInfo = Subtarget.getRegisterInfo(); | 
|  | 381 |  | 
|  | 382 | // Don't need a scratch register if we're not going to re-align the stack. | 
|  | 383 | if (!RegInfo->needsStackRealignment(*MF)) | 
|  | 384 | return true; | 
|  | 385 | // Otherwise, we can use any block as long as it has a scratch register | 
|  | 386 | // available. | 
|  | 387 | return findScratchNonCalleeSaveRegister(TmpMBB) != AArch64::NoRegister; | 
|  | 388 | } | 
|  | 389 |  | 
| Martin Storsjo | 2778fd0 | 2017-12-20 06:51:45 +0000 | [diff] [blame] | 390 | static bool windowsRequiresStackProbe(MachineFunction &MF, | 
|  | 391 | unsigned StackSizeInBytes) { | 
|  | 392 | const AArch64Subtarget &Subtarget = MF.getSubtarget<AArch64Subtarget>(); | 
|  | 393 | if (!Subtarget.isTargetWindows()) | 
|  | 394 | return false; | 
|  | 395 | const Function &F = MF.getFunction(); | 
|  | 396 | // TODO: When implementing stack protectors, take that into account | 
|  | 397 | // for the probe threshold. | 
|  | 398 | unsigned StackProbeSize = 4096; | 
|  | 399 | if (F.hasFnAttribute("stack-probe-size")) | 
|  | 400 | F.getFnAttribute("stack-probe-size") | 
|  | 401 | .getValueAsString() | 
|  | 402 | .getAsInteger(0, StackProbeSize); | 
| Hans Wennborg | 89c35fc | 2018-02-23 13:46:25 +0000 | [diff] [blame] | 403 | return (StackSizeInBytes >= StackProbeSize) && | 
|  | 404 | !F.hasFnAttribute("no-stack-arg-probe"); | 
| Martin Storsjo | 2778fd0 | 2017-12-20 06:51:45 +0000 | [diff] [blame] | 405 | } | 
|  | 406 |  | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 407 | bool AArch64FrameLowering::shouldCombineCSRLocalStackBump( | 
|  | 408 | MachineFunction &MF, unsigned StackBumpBytes) const { | 
|  | 409 | AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>(); | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 410 | const MachineFrameInfo &MFI = MF.getFrameInfo(); | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 411 | const AArch64Subtarget &Subtarget = MF.getSubtarget<AArch64Subtarget>(); | 
|  | 412 | const AArch64RegisterInfo *RegInfo = Subtarget.getRegisterInfo(); | 
|  | 413 |  | 
|  | 414 | if (AFI->getLocalStackSize() == 0) | 
|  | 415 | return false; | 
|  | 416 |  | 
|  | 417 | // 512 is the maximum immediate for stp/ldp that will be used for | 
|  | 418 | // callee-save save/restores | 
| Martin Storsjo | 2778fd0 | 2017-12-20 06:51:45 +0000 | [diff] [blame] | 419 | if (StackBumpBytes >= 512 || windowsRequiresStackProbe(MF, StackBumpBytes)) | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 420 | return false; | 
|  | 421 |  | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 422 | if (MFI.hasVarSizedObjects()) | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 423 | return false; | 
|  | 424 |  | 
|  | 425 | if (RegInfo->needsStackRealignment(MF)) | 
|  | 426 | return false; | 
|  | 427 |  | 
|  | 428 | // This isn't strictly necessary, but it simplifies things a bit since the | 
|  | 429 | // current RedZone handling code assumes the SP is adjusted by the | 
|  | 430 | // callee-save save/restore code. | 
|  | 431 | if (canUseRedZone(MF)) | 
|  | 432 | return false; | 
|  | 433 |  | 
|  | 434 | return true; | 
|  | 435 | } | 
|  | 436 |  | 
// Convert a callee-save register save/restore instruction into one that also
// performs the stack-pointer decrement/increment which allocates/deallocates
// the callee-save stack area, by rewriting the store/load to its
// pre/post-increment form. Returns an iterator to the instruction following
// the (now erased) original, i.e. the rewritten instruction.
//
// \param MBB            Block containing the save/restore sequence.
// \param MBBI           Iterator at the first (for stores) or last (for
//                       loads) callee-save save/restore instruction.
// \param DL             Debug location for the new instruction.
// \param TII            Target instruction info used to build the new MI.
// \param CSStackSizeInc Signed SP adjustment in bytes: negative to allocate
//                       (prologue), positive to deallocate (epilogue). Must
//                       be a multiple of 8.
static MachineBasicBlock::iterator convertCalleeSaveRestoreToSPPrePostIncDec(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    const DebugLoc &DL, const TargetInstrInfo *TII, int CSStackSizeInc) {
  // Ignore instructions that do not operate on SP, i.e. shadow call stack
  // instructions. These use x18 as the base register, never SP, so they can
  // simply be stepped over.
  while (MBBI->getOpcode() == AArch64::STRXpost ||
         MBBI->getOpcode() == AArch64::LDRXpre) {
    assert(MBBI->getOperand(0).getReg() != AArch64::SP);
    ++MBBI;
  }

  // Map the SP-relative-offset opcode to its pre-increment (stores) or
  // post-increment (loads) writeback form. The STR/LDR forms use unscaled
  // byte offsets in their writeback variants, hence NewIsUnscaled.
  unsigned NewOpc;
  bool NewIsUnscaled = false;
  switch (MBBI->getOpcode()) {
  default:
    llvm_unreachable("Unexpected callee-save save/restore opcode!");
  case AArch64::STPXi:
    NewOpc = AArch64::STPXpre;
    break;
  case AArch64::STPDi:
    NewOpc = AArch64::STPDpre;
    break;
  case AArch64::STRXui:
    NewOpc = AArch64::STRXpre;
    NewIsUnscaled = true;
    break;
  case AArch64::STRDui:
    NewOpc = AArch64::STRDpre;
    NewIsUnscaled = true;
    break;
  case AArch64::LDPXi:
    NewOpc = AArch64::LDPXpost;
    break;
  case AArch64::LDPDi:
    NewOpc = AArch64::LDPDpost;
    break;
  case AArch64::LDRXui:
    NewOpc = AArch64::LDRXpost;
    NewIsUnscaled = true;
    break;
  case AArch64::LDRDui:
    NewOpc = AArch64::LDRDpost;
    NewIsUnscaled = true;
    break;
  }

  // The writeback form defines SP as an extra first operand.
  MachineInstrBuilder MIB = BuildMI(MBB, MBBI, DL, TII->get(NewOpc));
  MIB.addReg(AArch64::SP, RegState::Define);

  // Copy all operands other than the immediate offset.
  unsigned OpndIdx = 0;
  for (unsigned OpndEnd = MBBI->getNumOperands() - 1; OpndIdx < OpndEnd;
       ++OpndIdx)
    MIB.add(MBBI->getOperand(OpndIdx));

  assert(MBBI->getOperand(OpndIdx).getImm() == 0 &&
         "Unexpected immediate offset in first/last callee-save save/restore "
         "instruction!");
  assert(MBBI->getOperand(OpndIdx - 1).getReg() == AArch64::SP &&
         "Unexpected base register in callee-save save/restore instruction!");
  // Last operand is immediate offset that needs fixing. Paired opcodes keep
  // a scaled immediate (units of 8 bytes); the unscaled STR/LDR writeback
  // forms take the raw byte count.
  assert(CSStackSizeInc % 8 == 0);
  int64_t CSStackSizeIncImm = CSStackSizeInc;
  if (!NewIsUnscaled)
    CSStackSizeIncImm /= 8;
  MIB.addImm(CSStackSizeIncImm);

  // Preserve the original instruction's flags (e.g. FrameSetup/FrameDestroy)
  // and memory operands on the replacement.
  MIB.setMIFlags(MBBI->getFlags());
  MIB.setMemRefs(MBBI->memoperands());

  // Erase the old instruction; the new one sits immediately before it, so
  // std::prev of the erase result points at the rewritten instruction.
  return std::prev(MBB.erase(MBBI));
}
|  | 512 |  | 
|  | 513 | // Fixup callee-save register save/restore instructions to take into account | 
|  | 514 | // combined SP bump by adding the local stack size to the stack offsets. | 
| Duncan P. N. Exon Smith | ab53fd9 | 2016-07-08 20:29:42 +0000 | [diff] [blame] | 515 | static void fixupCalleeSaveRestoreStackOffset(MachineInstr &MI, | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 516 | unsigned LocalStackSize) { | 
| Duncan P. N. Exon Smith | ab53fd9 | 2016-07-08 20:29:42 +0000 | [diff] [blame] | 517 | unsigned Opc = MI.getOpcode(); | 
| Peter Collingbourne | f11eb3e | 2018-04-04 21:55:44 +0000 | [diff] [blame] | 518 |  | 
|  | 519 | // Ignore instructions that do not operate on SP, i.e. shadow call stack | 
|  | 520 | // instructions. | 
|  | 521 | if (Opc == AArch64::STRXpost || Opc == AArch64::LDRXpre) { | 
|  | 522 | assert(MI.getOperand(0).getReg() != AArch64::SP); | 
|  | 523 | return; | 
|  | 524 | } | 
|  | 525 |  | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 526 | (void)Opc; | 
|  | 527 | assert((Opc == AArch64::STPXi || Opc == AArch64::STPDi || | 
|  | 528 | Opc == AArch64::STRXui || Opc == AArch64::STRDui || | 
|  | 529 | Opc == AArch64::LDPXi || Opc == AArch64::LDPDi || | 
|  | 530 | Opc == AArch64::LDRXui || Opc == AArch64::LDRDui) && | 
|  | 531 | "Unexpected callee-save save/restore opcode!"); | 
|  | 532 |  | 
| Duncan P. N. Exon Smith | ab53fd9 | 2016-07-08 20:29:42 +0000 | [diff] [blame] | 533 | unsigned OffsetIdx = MI.getNumExplicitOperands() - 1; | 
|  | 534 | assert(MI.getOperand(OffsetIdx - 1).getReg() == AArch64::SP && | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 535 | "Unexpected base register in callee-save save/restore instruction!"); | 
|  | 536 | // Last operand is immediate offset that needs fixing. | 
| Duncan P. N. Exon Smith | ab53fd9 | 2016-07-08 20:29:42 +0000 | [diff] [blame] | 537 | MachineOperand &OffsetOpnd = MI.getOperand(OffsetIdx); | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 538 | // All generated opcodes have scaled offsets. | 
|  | 539 | assert(LocalStackSize % 8 == 0); | 
|  | 540 | OffsetOpnd.setImm(OffsetOpnd.getImm() + LocalStackSize / 8); | 
|  | 541 | } | 
|  | 542 |  | 
| Francis Visoiu Mistrih | c855e92 | 2018-04-27 15:30:54 +0000 | [diff] [blame] | 543 | static void adaptForLdStOpt(MachineBasicBlock &MBB, | 
|  | 544 | MachineBasicBlock::iterator FirstSPPopI, | 
|  | 545 | MachineBasicBlock::iterator LastPopI) { | 
|  | 546 | // Sometimes (when we restore in the same order as we save), we can end up | 
|  | 547 | // with code like this: | 
|  | 548 | // | 
|  | 549 | // ldp      x26, x25, [sp] | 
|  | 550 | // ldp      x24, x23, [sp, #16] | 
|  | 551 | // ldp      x22, x21, [sp, #32] | 
|  | 552 | // ldp      x20, x19, [sp, #48] | 
|  | 553 | // add      sp, sp, #64 | 
|  | 554 | // | 
|  | 555 | // In this case, it is always better to put the first ldp at the end, so | 
|  | 556 | // that the load-store optimizer can run and merge the ldp and the add into | 
|  | 557 | // a post-index ldp. | 
|  | 558 | // If we managed to grab the first pop instruction, move it to the end. | 
|  | 559 | if (ReverseCSRRestoreSeq) | 
|  | 560 | MBB.splice(FirstSPPopI, &MBB, LastPopI); | 
|  | 561 | // We should end up with something like this now: | 
|  | 562 | // | 
|  | 563 | // ldp      x24, x23, [sp, #16] | 
|  | 564 | // ldp      x22, x21, [sp, #32] | 
|  | 565 | // ldp      x20, x19, [sp, #48] | 
|  | 566 | // ldp      x26, x25, [sp] | 
|  | 567 | // add      sp, sp, #64 | 
|  | 568 | // | 
|  | 569 | // and the load-store optimizer can merge the last two instructions into: | 
|  | 570 | // | 
|  | 571 | // ldp      x26, x25, [sp], #64 | 
|  | 572 | // | 
|  | 573 | } | 
|  | 574 |  | 
| Quentin Colombet | 61b305e | 2015-05-05 17:38:16 +0000 | [diff] [blame] | 575 | void AArch64FrameLowering::emitPrologue(MachineFunction &MF, | 
|  | 576 | MachineBasicBlock &MBB) const { | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 577 | MachineBasicBlock::iterator MBBI = MBB.begin(); | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 578 | const MachineFrameInfo &MFI = MF.getFrameInfo(); | 
| Matthias Braun | f1caa28 | 2017-12-15 22:22:58 +0000 | [diff] [blame] | 579 | const Function &F = MF.getFunction(); | 
| Ahmed Bougacha | 66834ec | 2015-12-16 22:54:06 +0000 | [diff] [blame] | 580 | const AArch64Subtarget &Subtarget = MF.getSubtarget<AArch64Subtarget>(); | 
|  | 581 | const AArch64RegisterInfo *RegInfo = Subtarget.getRegisterInfo(); | 
|  | 582 | const TargetInstrInfo *TII = Subtarget.getInstrInfo(); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 583 | MachineModuleInfo &MMI = MF.getMMI(); | 
| Tim Northover | 775aaeb | 2015-11-05 21:54:58 +0000 | [diff] [blame] | 584 | AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>(); | 
| Matthias Braun | f1caa28 | 2017-12-15 22:22:58 +0000 | [diff] [blame] | 585 | bool needsFrameMoves = MMI.hasDebugInfo() || F.needsUnwindTableEntry(); | 
| Tim Northover | 775aaeb | 2015-11-05 21:54:58 +0000 | [diff] [blame] | 586 | bool HasFP = hasFP(MF); | 
|  | 587 |  | 
| Jessica Paquette | 8aa6cd5 | 2018-04-12 16:16:18 +0000 | [diff] [blame] | 588 | // At this point, we're going to decide whether or not the function uses a | 
|  | 589 | // redzone. In most cases, the function doesn't have a redzone so let's | 
|  | 590 | // assume that's false and set it to true in the case that there's a redzone. | 
|  | 591 | AFI->setHasRedZone(false); | 
|  | 592 |  | 
| Tim Northover | 775aaeb | 2015-11-05 21:54:58 +0000 | [diff] [blame] | 593 | // Debug location must be unknown since the first debug location is used | 
|  | 594 | // to determine the end of the prologue. | 
|  | 595 | DebugLoc DL; | 
|  | 596 |  | 
| Luke Cheeseman | 64dcdec | 2018-08-17 12:53:22 +0000 | [diff] [blame] | 597 | if (ShouldSignReturnAddress(MF)) { | 
|  | 598 | BuildMI(MBB, MBBI, DL, TII->get(AArch64::PACIASP)) | 
|  | 599 | .setMIFlag(MachineInstr::FrameSetup); | 
|  | 600 | } | 
|  | 601 |  | 
| Tim Northover | 775aaeb | 2015-11-05 21:54:58 +0000 | [diff] [blame] | 602 | // All calls are tail calls in GHC calling conv, and functions have no | 
|  | 603 | // prologue/epilogue. | 
| Matthias Braun | f1caa28 | 2017-12-15 22:22:58 +0000 | [diff] [blame] | 604 | if (MF.getFunction().getCallingConv() == CallingConv::GHC) | 
| Greg Fitzgerald | fa78d08 | 2015-01-19 17:40:05 +0000 | [diff] [blame] | 605 | return; | 
|  | 606 |  | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 607 | int NumBytes = (int)MFI.getStackSize(); | 
| Martin Storsjo | 2778fd0 | 2017-12-20 06:51:45 +0000 | [diff] [blame] | 608 | if (!AFI->hasStackFrame() && !windowsRequiresStackProbe(MF, NumBytes)) { | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 609 | assert(!HasFP && "unexpected function without stack frame but with FP"); | 
|  | 610 |  | 
|  | 611 | // All of the stack allocation is for locals. | 
|  | 612 | AFI->setLocalStackSize(NumBytes); | 
|  | 613 |  | 
| Chad Rosier | 27c352d | 2016-03-14 18:24:34 +0000 | [diff] [blame] | 614 | if (!NumBytes) | 
|  | 615 | return; | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 616 | // REDZONE: If the stack size is less than 128 bytes, we don't need | 
|  | 617 | // to actually allocate. | 
| Jessica Paquette | 642f6c6 | 2018-04-03 21:56:10 +0000 | [diff] [blame] | 618 | if (canUseRedZone(MF)) { | 
|  | 619 | AFI->setHasRedZone(true); | 
| Chad Rosier | 27c352d | 2016-03-14 18:24:34 +0000 | [diff] [blame] | 620 | ++NumRedZoneFunctions; | 
| Jessica Paquette | 642f6c6 | 2018-04-03 21:56:10 +0000 | [diff] [blame] | 621 | } else { | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 622 | emitFrameOffset(MBB, MBBI, DL, AArch64::SP, AArch64::SP, -NumBytes, TII, | 
|  | 623 | MachineInstr::FrameSetup); | 
|  | 624 |  | 
| Chad Rosier | 27c352d | 2016-03-14 18:24:34 +0000 | [diff] [blame] | 625 | // Label used to tie together the PROLOG_LABEL and the MachineMoves. | 
|  | 626 | MCSymbol *FrameLabel = MMI.getContext().createTempSymbol(); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 627 | // Encode the stack size of the leaf function. | 
| Matthias Braun | f23ef43 | 2016-11-30 23:48:42 +0000 | [diff] [blame] | 628 | unsigned CFIIndex = MF.addFrameInst( | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 629 | MCCFIInstruction::createDefCfaOffset(FrameLabel, -NumBytes)); | 
|  | 630 | BuildMI(MBB, MBBI, DL, TII->get(TargetOpcode::CFI_INSTRUCTION)) | 
| Adrian Prantl | b9fa945 | 2014-12-16 00:20:49 +0000 | [diff] [blame] | 631 | .addCFIIndex(CFIIndex) | 
|  | 632 | .setMIFlags(MachineInstr::FrameSetup); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 633 | } | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 634 | return; | 
|  | 635 | } | 
|  | 636 |  | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 637 | bool IsWin64 = | 
| Matthias Braun | f1caa28 | 2017-12-15 22:22:58 +0000 | [diff] [blame] | 638 | Subtarget.isCallingConvWin64(MF.getFunction().getCallingConv()); | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 639 | unsigned FixedObject = IsWin64 ? alignTo(AFI->getVarArgsGPRSize(), 16) : 0; | 
|  | 640 |  | 
|  | 641 | auto PrologueSaveSize = AFI->getCalleeSavedStackSize() + FixedObject; | 
| Chad Rosier | 27c352d | 2016-03-14 18:24:34 +0000 | [diff] [blame] | 642 | // All of the remaining stack allocations are for locals. | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 643 | AFI->setLocalStackSize(NumBytes - PrologueSaveSize); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 644 |  | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 645 | bool CombineSPBump = shouldCombineCSRLocalStackBump(MF, NumBytes); | 
|  | 646 | if (CombineSPBump) { | 
|  | 647 | emitFrameOffset(MBB, MBBI, DL, AArch64::SP, AArch64::SP, -NumBytes, TII, | 
|  | 648 | MachineInstr::FrameSetup); | 
|  | 649 | NumBytes = 0; | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 650 | } else if (PrologueSaveSize != 0) { | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 651 | MBBI = convertCalleeSaveRestoreToSPPrePostIncDec(MBB, MBBI, DL, TII, | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 652 | -PrologueSaveSize); | 
|  | 653 | NumBytes -= PrologueSaveSize; | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 654 | } | 
|  | 655 | assert(NumBytes >= 0 && "Negative stack allocation size!?"); | 
|  | 656 |  | 
|  | 657 | // Move past the saves of the callee-saved registers, fixing up the offsets | 
|  | 658 | // and pre-inc if we decided to combine the callee-save and local stack | 
|  | 659 | // pointer bump above. | 
| Geoff Berry | 04bf91a | 2016-02-01 16:29:19 +0000 | [diff] [blame] | 660 | MachineBasicBlock::iterator End = MBB.end(); | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 661 | while (MBBI != End && MBBI->getFlag(MachineInstr::FrameSetup)) { | 
|  | 662 | if (CombineSPBump) | 
| Duncan P. N. Exon Smith | ab53fd9 | 2016-07-08 20:29:42 +0000 | [diff] [blame] | 663 | fixupCalleeSaveRestoreStackOffset(*MBBI, AFI->getLocalStackSize()); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 664 | ++MBBI; | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 665 | } | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 666 | if (HasFP) { | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 667 | // Only set up FP if we actually need to. Frame pointer is fp = | 
|  | 668 | // sp - fixedobject - 16. | 
|  | 669 | int FPOffset = AFI->getCalleeSavedStackSize() - 16; | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 670 | if (CombineSPBump) | 
|  | 671 | FPOffset += AFI->getLocalStackSize(); | 
| Chad Rosier | 27c352d | 2016-03-14 18:24:34 +0000 | [diff] [blame] | 672 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 673 | // Issue    sub fp, sp, FPOffset or | 
|  | 674 | //          mov fp,sp          when FPOffset is zero. | 
|  | 675 | // Note: All stores of callee-saved registers are marked as "FrameSetup". | 
|  | 676 | // This code marks the instruction(s) that set the FP also. | 
|  | 677 | emitFrameOffset(MBB, MBBI, DL, AArch64::FP, AArch64::SP, FPOffset, TII, | 
|  | 678 | MachineInstr::FrameSetup); | 
|  | 679 | } | 
|  | 680 |  | 
| Martin Storsjo | 2778fd0 | 2017-12-20 06:51:45 +0000 | [diff] [blame] | 681 | if (windowsRequiresStackProbe(MF, NumBytes)) { | 
|  | 682 | uint32_t NumWords = NumBytes >> 4; | 
|  | 683 |  | 
|  | 684 | BuildMI(MBB, MBBI, DL, TII->get(AArch64::MOVi64imm), AArch64::X15) | 
|  | 685 | .addImm(NumWords) | 
|  | 686 | .setMIFlags(MachineInstr::FrameSetup); | 
|  | 687 |  | 
|  | 688 | switch (MF.getTarget().getCodeModel()) { | 
| David Green | 9dd1d45 | 2018-08-22 11:31:39 +0000 | [diff] [blame] | 689 | case CodeModel::Tiny: | 
| Martin Storsjo | 2778fd0 | 2017-12-20 06:51:45 +0000 | [diff] [blame] | 690 | case CodeModel::Small: | 
|  | 691 | case CodeModel::Medium: | 
|  | 692 | case CodeModel::Kernel: | 
|  | 693 | BuildMI(MBB, MBBI, DL, TII->get(AArch64::BL)) | 
|  | 694 | .addExternalSymbol("__chkstk") | 
|  | 695 | .addReg(AArch64::X15, RegState::Implicit) | 
|  | 696 | .setMIFlags(MachineInstr::FrameSetup); | 
|  | 697 | break; | 
|  | 698 | case CodeModel::Large: | 
|  | 699 | BuildMI(MBB, MBBI, DL, TII->get(AArch64::MOVaddrEXT)) | 
|  | 700 | .addReg(AArch64::X16, RegState::Define) | 
|  | 701 | .addExternalSymbol("__chkstk") | 
|  | 702 | .addExternalSymbol("__chkstk") | 
|  | 703 | .setMIFlags(MachineInstr::FrameSetup); | 
|  | 704 |  | 
|  | 705 | BuildMI(MBB, MBBI, DL, TII->get(AArch64::BLR)) | 
|  | 706 | .addReg(AArch64::X16, RegState::Kill) | 
|  | 707 | .addReg(AArch64::X15, RegState::Implicit | RegState::Define) | 
|  | 708 | .setMIFlags(MachineInstr::FrameSetup); | 
|  | 709 | break; | 
|  | 710 | } | 
|  | 711 |  | 
|  | 712 | BuildMI(MBB, MBBI, DL, TII->get(AArch64::SUBXrx64), AArch64::SP) | 
|  | 713 | .addReg(AArch64::SP, RegState::Kill) | 
|  | 714 | .addReg(AArch64::X15, RegState::Kill) | 
|  | 715 | .addImm(AArch64_AM::getArithExtendImm(AArch64_AM::UXTX, 4)) | 
|  | 716 | .setMIFlags(MachineInstr::FrameSetup); | 
|  | 717 | NumBytes = 0; | 
|  | 718 | } | 
|  | 719 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 720 | // Allocate space for the rest of the frame. | 
| Chad Rosier | 27c352d | 2016-03-14 18:24:34 +0000 | [diff] [blame] | 721 | if (NumBytes) { | 
|  | 722 | const bool NeedsRealignment = RegInfo->needsStackRealignment(MF); | 
|  | 723 | unsigned scratchSPReg = AArch64::SP; | 
| Kristof Beyls | 17cb898 | 2015-04-09 08:49:47 +0000 | [diff] [blame] | 724 |  | 
| Chad Rosier | 27c352d | 2016-03-14 18:24:34 +0000 | [diff] [blame] | 725 | if (NeedsRealignment) { | 
|  | 726 | scratchSPReg = findScratchNonCalleeSaveRegister(&MBB); | 
|  | 727 | assert(scratchSPReg != AArch64::NoRegister); | 
|  | 728 | } | 
| Kristof Beyls | 17cb898 | 2015-04-09 08:49:47 +0000 | [diff] [blame] | 729 |  | 
| Chad Rosier | 27c352d | 2016-03-14 18:24:34 +0000 | [diff] [blame] | 730 | // If we're a leaf function, try using the red zone. | 
|  | 731 | if (!canUseRedZone(MF)) | 
|  | 732 | // FIXME: in the case of dynamic re-alignment, NumBytes doesn't have | 
|  | 733 | // the correct value here, as NumBytes also includes padding bytes, | 
|  | 734 | // which shouldn't be counted here. | 
|  | 735 | emitFrameOffset(MBB, MBBI, DL, scratchSPReg, AArch64::SP, -NumBytes, TII, | 
|  | 736 | MachineInstr::FrameSetup); | 
| Kristof Beyls | 17cb898 | 2015-04-09 08:49:47 +0000 | [diff] [blame] | 737 |  | 
| Chad Rosier | 27c352d | 2016-03-14 18:24:34 +0000 | [diff] [blame] | 738 | if (NeedsRealignment) { | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 739 | const unsigned Alignment = MFI.getMaxAlignment(); | 
| Chad Rosier | 27c352d | 2016-03-14 18:24:34 +0000 | [diff] [blame] | 740 | const unsigned NrBitsToZero = countTrailingZeros(Alignment); | 
|  | 741 | assert(NrBitsToZero > 1); | 
|  | 742 | assert(scratchSPReg != AArch64::SP); | 
| Kristof Beyls | 17cb898 | 2015-04-09 08:49:47 +0000 | [diff] [blame] | 743 |  | 
| Chad Rosier | 27c352d | 2016-03-14 18:24:34 +0000 | [diff] [blame] | 744 | // SUB X9, SP, NumBytes | 
|  | 745 | //   -- X9 is temporary register, so shouldn't contain any live data here, | 
|  | 746 | //   -- free to use. This is already produced by emitFrameOffset above. | 
|  | 747 | // AND SP, X9, 0b11111...0000 | 
|  | 748 | // The logical immediates have a non-trivial encoding. The following | 
|  | 749 | // formula computes the encoded immediate with all ones but | 
|  | 750 | // NrBitsToZero zero bits as least significant bits. | 
|  | 751 | uint32_t andMaskEncoded = (1 << 12)                         // = N | 
|  | 752 | | ((64 - NrBitsToZero) << 6)      // immr | 
|  | 753 | | ((64 - NrBitsToZero - 1) << 0); // imms | 
|  | 754 |  | 
|  | 755 | BuildMI(MBB, MBBI, DL, TII->get(AArch64::ANDXri), AArch64::SP) | 
|  | 756 | .addReg(scratchSPReg, RegState::Kill) | 
|  | 757 | .addImm(andMaskEncoded); | 
|  | 758 | AFI->setStackRealigned(true); | 
|  | 759 | } | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 760 | } | 
|  | 761 |  | 
|  | 762 | // If we need a base pointer, set it up here. It's whatever the value of the | 
|  | 763 | // stack pointer is at this point. Any variable size objects will be allocated | 
|  | 764 | // after this, so we can still use the base pointer to reference locals. | 
|  | 765 | // | 
|  | 766 | // FIXME: Clarify FrameSetup flags here. | 
|  | 767 | // Note: Use emitFrameOffset() like above for FP if the FrameSetup flag is | 
|  | 768 | // needed. | 
| Kristof Beyls | 17cb898 | 2015-04-09 08:49:47 +0000 | [diff] [blame] | 769 | if (RegInfo->hasBasePointer(MF)) { | 
|  | 770 | TII->copyPhysReg(MBB, MBBI, DL, RegInfo->getBaseRegister(), AArch64::SP, | 
|  | 771 | false); | 
|  | 772 | } | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 773 |  | 
|  | 774 | if (needsFrameMoves) { | 
| Mehdi Amini | bd7287e | 2015-07-16 06:11:10 +0000 | [diff] [blame] | 775 | const DataLayout &TD = MF.getDataLayout(); | 
|  | 776 | const int StackGrowth = -TD.getPointerSize(0); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 777 | unsigned FramePtr = RegInfo->getFrameRegister(MF); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 778 | // An example of the prologue: | 
|  | 779 | // | 
|  | 780 | //     .globl __foo | 
|  | 781 | //     .align 2 | 
|  | 782 | //  __foo: | 
|  | 783 | // Ltmp0: | 
|  | 784 | //     .cfi_startproc | 
|  | 785 | //     .cfi_personality 155, ___gxx_personality_v0 | 
|  | 786 | // Leh_func_begin: | 
|  | 787 | //     .cfi_lsda 16, Lexception33 | 
|  | 788 | // | 
|  | 789 | //     stp  xa,bx, [sp, -#offset]! | 
|  | 790 | //     ... | 
|  | 791 | //     stp  x28, x27, [sp, #offset-32] | 
|  | 792 | //     stp  fp, lr, [sp, #offset-16] | 
|  | 793 | //     add  fp, sp, #offset - 16 | 
|  | 794 | //     sub  sp, sp, #1360 | 
|  | 795 | // | 
|  | 796 | // The Stack: | 
|  | 797 | //       +-------------------------------------------+ | 
|  | 798 | // 10000 | ........ | ........ | ........ | ........ | | 
|  | 799 | // 10004 | ........ | ........ | ........ | ........ | | 
|  | 800 | //       +-------------------------------------------+ | 
|  | 801 | // 10008 | ........ | ........ | ........ | ........ | | 
|  | 802 | // 1000c | ........ | ........ | ........ | ........ | | 
|  | 803 | //       +===========================================+ | 
|  | 804 | // 10010 |                X28 Register               | | 
|  | 805 | // 10014 |                X28 Register               | | 
|  | 806 | //       +-------------------------------------------+ | 
|  | 807 | // 10018 |                X27 Register               | | 
|  | 808 | // 1001c |                X27 Register               | | 
|  | 809 | //       +===========================================+ | 
|  | 810 | // 10020 |                Frame Pointer              | | 
|  | 811 | // 10024 |                Frame Pointer              | | 
|  | 812 | //       +-------------------------------------------+ | 
|  | 813 | // 10028 |                Link Register              | | 
|  | 814 | // 1002c |                Link Register              | | 
|  | 815 | //       +===========================================+ | 
|  | 816 | // 10030 | ........ | ........ | ........ | ........ | | 
|  | 817 | // 10034 | ........ | ........ | ........ | ........ | | 
|  | 818 | //       +-------------------------------------------+ | 
|  | 819 | // 10038 | ........ | ........ | ........ | ........ | | 
|  | 820 | // 1003c | ........ | ........ | ........ | ........ | | 
|  | 821 | //       +-------------------------------------------+ | 
|  | 822 | // | 
|  | 823 | //     [sp] = 10030        ::    >>initial value<< | 
|  | 824 | //     sp = 10020          ::  stp fp, lr, [sp, #-16]! | 
|  | 825 | //     fp = sp == 10020    ::  mov fp, sp | 
|  | 826 | //     [sp] == 10020       ::  stp x28, x27, [sp, #-16]! | 
|  | 827 | //     sp == 10010         ::    >>final value<< | 
|  | 828 | // | 
|  | 829 | // The frame pointer (w29) points to address 10020. If we use an offset of | 
|  | 830 | // '16' from 'w29', we get the CFI offsets of -8 for w30, -16 for w29, -24 | 
|  | 831 | // for w27, and -32 for w28: | 
|  | 832 | // | 
|  | 833 | //  Ltmp1: | 
|  | 834 | //     .cfi_def_cfa w29, 16 | 
|  | 835 | //  Ltmp2: | 
|  | 836 | //     .cfi_offset w30, -8 | 
|  | 837 | //  Ltmp3: | 
|  | 838 | //     .cfi_offset w29, -16 | 
|  | 839 | //  Ltmp4: | 
|  | 840 | //     .cfi_offset w27, -24 | 
|  | 841 | //  Ltmp5: | 
|  | 842 | //     .cfi_offset w28, -32 | 
|  | 843 |  | 
|  | 844 | if (HasFP) { | 
|  | 845 | // Define the current CFA rule to use the provided FP. | 
|  | 846 | unsigned Reg = RegInfo->getDwarfRegNum(FramePtr, true); | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 847 | unsigned CFIIndex = MF.addFrameInst(MCCFIInstruction::createDefCfa( | 
|  | 848 | nullptr, Reg, 2 * StackGrowth - FixedObject)); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 849 | BuildMI(MBB, MBBI, DL, TII->get(TargetOpcode::CFI_INSTRUCTION)) | 
| Adrian Prantl | b9fa945 | 2014-12-16 00:20:49 +0000 | [diff] [blame] | 850 | .addCFIIndex(CFIIndex) | 
|  | 851 | .setMIFlags(MachineInstr::FrameSetup); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 852 | } else { | 
|  | 853 | // Encode the stack size of the leaf function. | 
| Matthias Braun | f23ef43 | 2016-11-30 23:48:42 +0000 | [diff] [blame] | 854 | unsigned CFIIndex = MF.addFrameInst( | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 855 | MCCFIInstruction::createDefCfaOffset(nullptr, -MFI.getStackSize())); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 856 | BuildMI(MBB, MBBI, DL, TII->get(TargetOpcode::CFI_INSTRUCTION)) | 
| Adrian Prantl | b9fa945 | 2014-12-16 00:20:49 +0000 | [diff] [blame] | 857 | .addCFIIndex(CFIIndex) | 
|  | 858 | .setMIFlags(MachineInstr::FrameSetup); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 859 | } | 
|  | 860 |  | 
| Geoff Berry | 62d4725 | 2016-02-25 16:36:08 +0000 | [diff] [blame] | 861 | // Now emit the moves for whatever callee saved regs we have (including FP, | 
|  | 862 | // LR if those are saved). | 
|  | 863 | emitCalleeSavedFrameMoves(MBB, MBBI); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 864 | } | 
|  | 865 | } | 
|  | 866 |  | 
| Luke Cheeseman | 64dcdec | 2018-08-17 12:53:22 +0000 | [diff] [blame] | 867 | static void InsertReturnAddressAuth(MachineFunction &MF, | 
|  | 868 | MachineBasicBlock &MBB) { | 
|  | 869 | if (!ShouldSignReturnAddress(MF)) | 
|  | 870 | return; | 
|  | 871 | const AArch64Subtarget &Subtarget = MF.getSubtarget<AArch64Subtarget>(); | 
|  | 872 | const TargetInstrInfo *TII = Subtarget.getInstrInfo(); | 
|  | 873 |  | 
|  | 874 | MachineBasicBlock::iterator MBBI = MBB.getFirstTerminator(); | 
|  | 875 | DebugLoc DL; | 
|  | 876 | if (MBBI != MBB.end()) | 
|  | 877 | DL = MBBI->getDebugLoc(); | 
|  | 878 |  | 
|  | 879 | // The AUTIASP instruction assembles to a hint instruction before v8.3a so | 
|  | 880 | // this instruction can safely used for any v8a architecture. | 
|  | 881 | // From v8.3a onwards there are optimised authenticate LR and return | 
|  | 882 | // instructions, namely RETA{A,B}, that can be used instead. | 
|  | 883 | if (Subtarget.hasV8_3aOps() && MBBI != MBB.end() && | 
|  | 884 | MBBI->getOpcode() == AArch64::RET_ReallyLR) { | 
|  | 885 | BuildMI(MBB, MBBI, DL, TII->get(AArch64::RETAA)).copyImplicitOps(*MBBI); | 
|  | 886 | MBB.erase(MBBI); | 
|  | 887 | } else { | 
|  | 888 | BuildMI(MBB, MBBI, DL, TII->get(AArch64::AUTIASP)) | 
|  | 889 | .setMIFlag(MachineInstr::FrameDestroy); | 
|  | 890 | } | 
|  | 891 | } | 
|  | 892 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 893 | void AArch64FrameLowering::emitEpilogue(MachineFunction &MF, | 
|  | 894 | MachineBasicBlock &MBB) const { | 
|  | 895 | MachineBasicBlock::iterator MBBI = MBB.getLastNonDebugInstr(); | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 896 | MachineFrameInfo &MFI = MF.getFrameInfo(); | 
| Ahmed Bougacha | 66834ec | 2015-12-16 22:54:06 +0000 | [diff] [blame] | 897 | const AArch64Subtarget &Subtarget = MF.getSubtarget<AArch64Subtarget>(); | 
| Ahmed Bougacha | 66834ec | 2015-12-16 22:54:06 +0000 | [diff] [blame] | 898 | const TargetInstrInfo *TII = Subtarget.getInstrInfo(); | 
| Quentin Colombet | 61b305e | 2015-05-05 17:38:16 +0000 | [diff] [blame] | 899 | DebugLoc DL; | 
|  | 900 | bool IsTailCallReturn = false; | 
|  | 901 | if (MBB.end() != MBBI) { | 
|  | 902 | DL = MBBI->getDebugLoc(); | 
|  | 903 | unsigned RetOpcode = MBBI->getOpcode(); | 
|  | 904 | IsTailCallReturn = RetOpcode == AArch64::TCRETURNdi || | 
|  | 905 | RetOpcode == AArch64::TCRETURNri; | 
|  | 906 | } | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 907 | int NumBytes = MFI.getStackSize(); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 908 | const AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>(); | 
|  | 909 |  | 
| Greg Fitzgerald | fa78d08 | 2015-01-19 17:40:05 +0000 | [diff] [blame] | 910 | // All calls are tail calls in GHC calling conv, and functions have no | 
|  | 911 | // prologue/epilogue. | 
| Matthias Braun | f1caa28 | 2017-12-15 22:22:58 +0000 | [diff] [blame] | 912 | if (MF.getFunction().getCallingConv() == CallingConv::GHC) | 
| Greg Fitzgerald | fa78d08 | 2015-01-19 17:40:05 +0000 | [diff] [blame] | 913 | return; | 
|  | 914 |  | 
| Kristof Beyls | 17cb898 | 2015-04-09 08:49:47 +0000 | [diff] [blame] | 915 | // Initial and residual are named for consistency with the prologue. Note that | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 916 | // in the epilogue, the residual adjustment is executed first. | 
|  | 917 | uint64_t ArgumentPopSize = 0; | 
| Quentin Colombet | 61b305e | 2015-05-05 17:38:16 +0000 | [diff] [blame] | 918 | if (IsTailCallReturn) { | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 919 | MachineOperand &StackAdjust = MBBI->getOperand(1); | 
|  | 920 |  | 
|  | 921 | // For a tail-call in a callee-pops-arguments environment, some or all of | 
|  | 922 | // the stack may actually be in use for the call's arguments, this is | 
|  | 923 | // calculated during LowerCall and consumed here... | 
|  | 924 | ArgumentPopSize = StackAdjust.getImm(); | 
|  | 925 | } else { | 
|  | 926 | // ... otherwise the amount to pop is *all* of the argument space, | 
|  | 927 | // conveniently stored in the MachineFunctionInfo by | 
|  | 928 | // LowerFormalArguments. This will, of course, be zero for the C calling | 
|  | 929 | // convention. | 
|  | 930 | ArgumentPopSize = AFI->getArgumentStackToRestore(); | 
|  | 931 | } | 
|  | 932 |  | 
|  | 933 | // The stack frame should be like below, | 
|  | 934 | // | 
|  | 935 | //      ----------------------                     --- | 
|  | 936 | //      |                    |                      | | 
|  | 937 | //      | BytesInStackArgArea|              CalleeArgStackSize | 
|  | 938 | //      | (NumReusableBytes) |                (of tail call) | 
|  | 939 | //      |                    |                     --- | 
|  | 940 | //      |                    |                      | | 
|  | 941 | //      ---------------------|        ---           | | 
|  | 942 | //      |                    |         |            | | 
|  | 943 | //      |   CalleeSavedReg   |         |            | | 
| Geoff Berry | 04bf91a | 2016-02-01 16:29:19 +0000 | [diff] [blame] | 944 | //      | (CalleeSavedStackSize)|      |            | | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 945 | //      |                    |         |            | | 
|  | 946 | //      ---------------------|         |         NumBytes | 
|  | 947 | //      |                    |     StackSize  (StackAdjustUp) | 
|  | 948 | //      |   LocalStackSize   |         |            | | 
|  | 949 | //      | (covering callee   |         |            | | 
|  | 950 | //      |       args)        |         |            | | 
|  | 951 | //      |                    |         |            | | 
|  | 952 | //      ----------------------        ---          --- | 
|  | 953 | // | 
|  | 954 | // So NumBytes = StackSize + BytesInStackArgArea - CalleeArgStackSize | 
|  | 955 | //             = StackSize + ArgumentPopSize | 
|  | 956 | // | 
|  | 957 | // AArch64TargetLowering::LowerCall figures out ArgumentPopSize and keeps | 
|  | 958 | // it as the 2nd argument of AArch64ISD::TC_RETURN. | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 959 |  | 
| Luke Cheeseman | 64dcdec | 2018-08-17 12:53:22 +0000 | [diff] [blame] | 960 | auto Cleanup = make_scope_exit([&] { InsertReturnAddressAuth(MF, MBB); }); | 
|  | 961 |  | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 962 | bool IsWin64 = | 
| Matthias Braun | f1caa28 | 2017-12-15 22:22:58 +0000 | [diff] [blame] | 963 | Subtarget.isCallingConvWin64(MF.getFunction().getCallingConv()); | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 964 | unsigned FixedObject = IsWin64 ? alignTo(AFI->getVarArgsGPRSize(), 16) : 0; | 
|  | 965 |  | 
| Francis Visoiu Mistrih | 164560b | 2018-03-14 20:34:03 +0000 | [diff] [blame] | 966 | uint64_t AfterCSRPopSize = ArgumentPopSize; | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 967 | auto PrologueSaveSize = AFI->getCalleeSavedStackSize() + FixedObject; | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 968 | bool CombineSPBump = shouldCombineCSRLocalStackBump(MF, NumBytes); | 
| Francis Visoiu Mistrih | 164560b | 2018-03-14 20:34:03 +0000 | [diff] [blame] | 969 | // Assume we can't combine the last pop with the sp restore. | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 970 |  | 
| Francis Visoiu Mistrih | 164560b | 2018-03-14 20:34:03 +0000 | [diff] [blame] | 971 | if (!CombineSPBump && PrologueSaveSize != 0) { | 
|  | 972 | MachineBasicBlock::iterator Pop = std::prev(MBB.getFirstTerminator()); | 
|  | 973 | // Converting the last ldp to a post-index ldp is valid only if the last | 
|  | 974 | // ldp's offset is 0. | 
|  | 975 | const MachineOperand &OffsetOp = Pop->getOperand(Pop->getNumOperands() - 1); | 
|  | 976 | // If the offset is 0, convert it to a post-index ldp. | 
|  | 977 | if (OffsetOp.getImm() == 0) { | 
|  | 978 | convertCalleeSaveRestoreToSPPrePostIncDec(MBB, Pop, DL, TII, | 
|  | 979 | PrologueSaveSize); | 
|  | 980 | } else { | 
|  | 981 | // If not, make sure to emit an add after the last ldp. | 
|  | 982 | // We're doing this by transfering the size to be restored from the | 
|  | 983 | // adjustment *before* the CSR pops to the adjustment *after* the CSR | 
|  | 984 | // pops. | 
|  | 985 | AfterCSRPopSize += PrologueSaveSize; | 
|  | 986 | } | 
|  | 987 | } | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 988 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 989 | // Move past the restores of the callee-saved registers. | 
| Francis Visoiu Mistrih | 164560b | 2018-03-14 20:34:03 +0000 | [diff] [blame] | 990 | // If we plan on combining the sp bump of the local stack size and the callee | 
|  | 991 | // save stack size, we might need to adjust the CSR save and restore offsets. | 
| Quentin Colombet | 61b305e | 2015-05-05 17:38:16 +0000 | [diff] [blame] | 992 | MachineBasicBlock::iterator LastPopI = MBB.getFirstTerminator(); | 
| Matthias Braun | 4541929 | 2015-12-17 03:18:47 +0000 | [diff] [blame] | 993 | MachineBasicBlock::iterator Begin = MBB.begin(); | 
|  | 994 | while (LastPopI != Begin) { | 
|  | 995 | --LastPopI; | 
| Geoff Berry | 04bf91a | 2016-02-01 16:29:19 +0000 | [diff] [blame] | 996 | if (!LastPopI->getFlag(MachineInstr::FrameDestroy)) { | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 997 | ++LastPopI; | 
| Matthias Braun | 4541929 | 2015-12-17 03:18:47 +0000 | [diff] [blame] | 998 | break; | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 999 | } else if (CombineSPBump) | 
| Duncan P. N. Exon Smith | ab53fd9 | 2016-07-08 20:29:42 +0000 | [diff] [blame] | 1000 | fixupCalleeSaveRestoreStackOffset(*LastPopI, AFI->getLocalStackSize()); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1001 | } | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 1002 |  | 
|  | 1003 | // If there is a single SP update, insert it before the ret and we're done. | 
|  | 1004 | if (CombineSPBump) { | 
|  | 1005 | emitFrameOffset(MBB, MBB.getFirstTerminator(), DL, AArch64::SP, AArch64::SP, | 
| Francis Visoiu Mistrih | 164560b | 2018-03-14 20:34:03 +0000 | [diff] [blame] | 1006 | NumBytes + AfterCSRPopSize, TII, | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 1007 | MachineInstr::FrameDestroy); | 
|  | 1008 | return; | 
|  | 1009 | } | 
|  | 1010 |  | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 1011 | NumBytes -= PrologueSaveSize; | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1012 | assert(NumBytes >= 0 && "Negative stack allocation size!?"); | 
|  | 1013 |  | 
|  | 1014 | if (!hasFP(MF)) { | 
| Geoff Berry | a1c6269 | 2016-02-23 16:54:36 +0000 | [diff] [blame] | 1015 | bool RedZone = canUseRedZone(MF); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1016 | // If this was a redzone leaf function, we don't need to restore the | 
| Geoff Berry | a1c6269 | 2016-02-23 16:54:36 +0000 | [diff] [blame] | 1017 | // stack pointer (but we may need to pop stack args for fastcc). | 
| Francis Visoiu Mistrih | 164560b | 2018-03-14 20:34:03 +0000 | [diff] [blame] | 1018 | if (RedZone && AfterCSRPopSize == 0) | 
| Geoff Berry | a1c6269 | 2016-02-23 16:54:36 +0000 | [diff] [blame] | 1019 | return; | 
|  | 1020 |  | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 1021 | bool NoCalleeSaveRestore = PrologueSaveSize == 0; | 
| Geoff Berry | a1c6269 | 2016-02-23 16:54:36 +0000 | [diff] [blame] | 1022 | int StackRestoreBytes = RedZone ? 0 : NumBytes; | 
|  | 1023 | if (NoCalleeSaveRestore) | 
| Francis Visoiu Mistrih | 164560b | 2018-03-14 20:34:03 +0000 | [diff] [blame] | 1024 | StackRestoreBytes += AfterCSRPopSize; | 
| Francis Visoiu Mistrih | c855e92 | 2018-04-27 15:30:54 +0000 | [diff] [blame] | 1025 |  | 
| Geoff Berry | a1c6269 | 2016-02-23 16:54:36 +0000 | [diff] [blame] | 1026 | // If we were able to combine the local stack pop with the argument pop, | 
|  | 1027 | // then we're done. | 
| Francis Visoiu Mistrih | c855e92 | 2018-04-27 15:30:54 +0000 | [diff] [blame] | 1028 | bool Done = NoCalleeSaveRestore || AfterCSRPopSize == 0; | 
|  | 1029 |  | 
|  | 1030 | // If we're done after this, make sure to help the load store optimizer. | 
|  | 1031 | if (Done) | 
|  | 1032 | adaptForLdStOpt(MBB, MBB.getFirstTerminator(), LastPopI); | 
|  | 1033 |  | 
|  | 1034 | emitFrameOffset(MBB, LastPopI, DL, AArch64::SP, AArch64::SP, | 
|  | 1035 | StackRestoreBytes, TII, MachineInstr::FrameDestroy); | 
|  | 1036 | if (Done) | 
| Geoff Berry | a1c6269 | 2016-02-23 16:54:36 +0000 | [diff] [blame] | 1037 | return; | 
| Francis Visoiu Mistrih | c855e92 | 2018-04-27 15:30:54 +0000 | [diff] [blame] | 1038 |  | 
| Geoff Berry | a1c6269 | 2016-02-23 16:54:36 +0000 | [diff] [blame] | 1039 | NumBytes = 0; | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1040 | } | 
|  | 1041 |  | 
|  | 1042 | // Restore the original stack pointer. | 
|  | 1043 | // FIXME: Rather than doing the math here, we should instead just use | 
|  | 1044 | // non-post-indexed loads for the restores if we aren't actually going to | 
|  | 1045 | // be able to save any instructions. | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 1046 | if (MFI.hasVarSizedObjects() || AFI->isStackRealigned()) | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1047 | emitFrameOffset(MBB, LastPopI, DL, AArch64::SP, AArch64::FP, | 
| Martin Storsjo | eacf4e4 | 2017-08-01 21:13:54 +0000 | [diff] [blame] | 1048 | -AFI->getCalleeSavedStackSize() + 16, TII, | 
|  | 1049 | MachineInstr::FrameDestroy); | 
| Chad Rosier | 6d98655 | 2016-03-14 18:17:41 +0000 | [diff] [blame] | 1050 | else if (NumBytes) | 
|  | 1051 | emitFrameOffset(MBB, LastPopI, DL, AArch64::SP, AArch64::SP, NumBytes, TII, | 
|  | 1052 | MachineInstr::FrameDestroy); | 
| Geoff Berry | a1c6269 | 2016-02-23 16:54:36 +0000 | [diff] [blame] | 1053 |  | 
|  | 1054 | // This must be placed after the callee-save restore code because that code | 
|  | 1055 | // assumes the SP is at the same location as it was after the callee-save save | 
|  | 1056 | // code in the prologue. | 
| Francis Visoiu Mistrih | 164560b | 2018-03-14 20:34:03 +0000 | [diff] [blame] | 1057 | if (AfterCSRPopSize) { | 
| Peter Collingbourne | f11eb3e | 2018-04-04 21:55:44 +0000 | [diff] [blame] | 1058 | // Find an insertion point for the first ldp so that it goes before the | 
|  | 1059 | // shadow call stack epilog instruction. This ensures that the restore of | 
|  | 1060 | // lr from x18 is placed after the restore from sp. | 
|  | 1061 | auto FirstSPPopI = MBB.getFirstTerminator(); | 
|  | 1062 | while (FirstSPPopI != Begin) { | 
|  | 1063 | auto Prev = std::prev(FirstSPPopI); | 
|  | 1064 | if (Prev->getOpcode() != AArch64::LDRXpre || | 
|  | 1065 | Prev->getOperand(0).getReg() == AArch64::SP) | 
|  | 1066 | break; | 
|  | 1067 | FirstSPPopI = Prev; | 
|  | 1068 | } | 
|  | 1069 |  | 
| Francis Visoiu Mistrih | c855e92 | 2018-04-27 15:30:54 +0000 | [diff] [blame] | 1070 | adaptForLdStOpt(MBB, FirstSPPopI, LastPopI); | 
|  | 1071 |  | 
| Peter Collingbourne | f11eb3e | 2018-04-04 21:55:44 +0000 | [diff] [blame] | 1072 | emitFrameOffset(MBB, FirstSPPopI, DL, AArch64::SP, AArch64::SP, | 
| Francis Visoiu Mistrih | 164560b | 2018-03-14 20:34:03 +0000 | [diff] [blame] | 1073 | AfterCSRPopSize, TII, MachineInstr::FrameDestroy); | 
|  | 1074 | } | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1075 | } | 
|  | 1076 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1077 | /// getFrameIndexReference - Provide a base+offset reference to an FI slot for | 
|  | 1078 | /// debug info.  It's the same as what we use for resolving the code-gen | 
|  | 1079 | /// references for now.  FIXME: This can go wrong when references are | 
|  | 1080 | /// SP-relative and simple call frames aren't used. | 
|  | 1081 | int AArch64FrameLowering::getFrameIndexReference(const MachineFunction &MF, | 
|  | 1082 | int FI, | 
|  | 1083 | unsigned &FrameReg) const { | 
|  | 1084 | return resolveFrameIndexReference(MF, FI, FrameReg); | 
|  | 1085 | } | 
|  | 1086 |  | 
/// Resolve a frame index to a (base register, offset) pair. The chosen base
/// is written to \p FrameReg — the frame pointer, the base pointer, or SP —
/// and the byte offset from that base is returned. \p PreferFP biases the
/// choice towards the frame pointer when both FP- and SP/BP-relative
/// addressing are possible.
int AArch64FrameLowering::resolveFrameIndexReference(const MachineFunction &MF,
                                                     int FI, unsigned &FrameReg,
                                                     bool PreferFP) const {
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  const AArch64RegisterInfo *RegInfo = static_cast<const AArch64RegisterInfo *>(
      MF.getSubtarget().getRegisterInfo());
  const AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>();
  const AArch64Subtarget &Subtarget = MF.getSubtarget<AArch64Subtarget>();
  bool IsWin64 =
      Subtarget.isCallingConvWin64(MF.getFunction().getCallingConv());
  // On Win64 a fixed varargs GPR save area sits between FP and the object,
  // so FP-relative offsets must step over it (rounded to 16 bytes).
  unsigned FixedObject = IsWin64 ? alignTo(AFI->getVarArgsGPRSize(), 16) : 0;
  // +16 skips the FP/LR pair the frame pointer points at.
  int FPOffset = MFI.getObjectOffset(FI) + FixedObject + 16;
  // SP-relative offset: object offset measured from the bottom of the frame.
  int Offset = MFI.getObjectOffset(FI) + MFI.getStackSize();
  bool isFixed = MFI.isFixedObjectIndex(FI);
  // CSR slots live in the top CalleeSavedStackSize bytes of the local frame
  // (offsets in [-CalleeSavedStackSize, 0) relative to the frame record).
  bool isCSR = !isFixed && MFI.getObjectOffset(FI) >=
                               -((int)AFI->getCalleeSavedStackSize());

  // Use frame pointer to reference fixed objects. Use it for locals if
  // there are VLAs or a dynamically realigned SP (and thus the SP isn't
  // reliable as a base). Make sure useFPForScavengingIndex() does the
  // right thing for the emergency spill slot.
  bool UseFP = false;
  if (AFI->hasStackFrame()) {
    // Note: Keeping the following as multiple 'if' statements rather than
    // merging to a single expression for readability.
    //
    // Argument access should always use the FP.
    if (isFixed) {
      UseFP = hasFP(MF);
    } else if (isCSR && RegInfo->needsStackRealignment(MF)) {
      // References to the CSR area must use FP if we're re-aligning the stack
      // since the dynamically-sized alignment padding is between the SP/BP and
      // the CSR area.
      assert(hasFP(MF) && "Re-aligned stack must have frame pointer");
      UseFP = true;
    } else if (hasFP(MF) && !RegInfo->needsStackRealignment(MF)) {
      // If the FPOffset is negative, we have to keep in mind that the
      // available offset range for negative offsets is smaller than for
      // positive ones. If an offset is
      // available via the FP and the SP, use whichever is closest.
      // -256 is the lower bound of the signed 9-bit immediate offset range.
      bool FPOffsetFits = FPOffset >= -256;
      PreferFP |= Offset > -FPOffset;

      if (MFI.hasVarSizedObjects()) {
        // If we have variable sized objects, we can use either FP or BP, as the
        // SP offset is unknown. We can use the base pointer if we have one and
        // FP is not preferred. If not, we're stuck with using FP.
        bool CanUseBP = RegInfo->hasBasePointer(MF);
        if (FPOffsetFits && CanUseBP) // Both are ok. Pick the best.
          UseFP = PreferFP;
        else if (!CanUseBP) // Can't use BP. Forced to use FP.
          UseFP = true;
        // else we can use BP and FP, but the offset from FP won't fit.
        // That will make us scavenge registers which we can probably avoid by
        // using BP. If it won't fit for BP either, we'll scavenge anyway.
      } else if (FPOffset >= 0) {
        // Use SP or FP, whichever gives us the best chance of the offset
        // being in range for direct access. If the FPOffset is positive,
        // that'll always be best, as the SP will be even further away.
        UseFP = true;
      } else {
        // We have the choice between FP and (SP or BP).
        if (FPOffsetFits && PreferFP) // If FP is the best fit, use it.
          UseFP = true;
      }
    }
  }

  assert(((isFixed || isCSR) || !RegInfo->needsStackRealignment(MF) || !UseFP) &&
         "In the presence of dynamic stack pointer realignment, "
         "non-argument/CSR objects cannot be accessed through the frame pointer");

  if (UseFP) {
    FrameReg = RegInfo->getFrameRegister(MF);
    return FPOffset;
  }

  // Use the base pointer if we have one.
  if (RegInfo->hasBasePointer(MF))
    FrameReg = RegInfo->getBaseRegister();
  else {
    assert(!MFI.hasVarSizedObjects() &&
           "Can't use SP when we have var sized objects.");
    FrameReg = AArch64::SP;
    // If we're using the red zone for this function, the SP won't actually
    // be adjusted, so the offsets will be negative. They're also all
    // within range of the signed 9-bit immediate instructions.
    if (canUseRedZone(MF))
      Offset -= AFI->getLocalStackSize();
  }

  return Offset;
}
|  | 1180 |  | 
|  | 1181 | static unsigned getPrologueDeath(MachineFunction &MF, unsigned Reg) { | 
| Matthias Braun | 74a0bd3 | 2016-04-13 21:43:16 +0000 | [diff] [blame] | 1182 | // Do not set a kill flag on values that are also marked as live-in. This | 
|  | 1183 | // happens with the @llvm-returnaddress intrinsic and with arguments passed in | 
|  | 1184 | // callee saved registers. | 
|  | 1185 | // Omitting the kill flags is conservatively correct even if the live-in | 
|  | 1186 | // is not used after all. | 
|  | 1187 | bool IsLiveIn = MF.getRegInfo().isLiveIn(Reg); | 
|  | 1188 | return getKillRegState(!IsLiveIn); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1189 | } | 
|  | 1190 |  | 
| Manman Ren | 5751814 | 2016-04-11 21:08:06 +0000 | [diff] [blame] | 1191 | static bool produceCompactUnwindFrame(MachineFunction &MF) { | 
|  | 1192 | const AArch64Subtarget &Subtarget = MF.getSubtarget<AArch64Subtarget>(); | 
| Matthias Braun | f1caa28 | 2017-12-15 22:22:58 +0000 | [diff] [blame] | 1193 | AttributeList Attrs = MF.getFunction().getAttributes(); | 
| Manman Ren | 5751814 | 2016-04-11 21:08:06 +0000 | [diff] [blame] | 1194 | return Subtarget.isTargetMachO() && | 
|  | 1195 | !(Subtarget.getTargetLowering()->supportSwiftError() && | 
|  | 1196 | Attrs.hasAttrSomewhere(Attribute::SwiftError)); | 
|  | 1197 | } | 
|  | 1198 |  | 
| Benjamin Kramer | b7d3311 | 2016-08-06 11:13:10 +0000 | [diff] [blame] | 1199 | namespace { | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 1200 |  | 
| Geoff Berry | 29d4a69 | 2016-02-01 19:07:06 +0000 | [diff] [blame] | 1201 | struct RegPairInfo { | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 1202 | unsigned Reg1 = AArch64::NoRegister; | 
|  | 1203 | unsigned Reg2 = AArch64::NoRegister; | 
| Geoff Berry | 29d4a69 | 2016-02-01 19:07:06 +0000 | [diff] [blame] | 1204 | int FrameIdx; | 
|  | 1205 | int Offset; | 
|  | 1206 | bool IsGPR; | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 1207 |  | 
|  | 1208 | RegPairInfo() = default; | 
|  | 1209 |  | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1210 | bool isPaired() const { return Reg2 != AArch64::NoRegister; } | 
| Geoff Berry | 29d4a69 | 2016-02-01 19:07:06 +0000 | [diff] [blame] | 1211 | }; | 
| Eugene Zelenko | 11f6907 | 2017-01-25 00:29:26 +0000 | [diff] [blame] | 1212 |  | 
| Benjamin Kramer | b7d3311 | 2016-08-06 11:13:10 +0000 | [diff] [blame] | 1213 | } // end anonymous namespace | 
| Geoff Berry | 29d4a69 | 2016-02-01 19:07:06 +0000 | [diff] [blame] | 1214 |  | 
/// Build the list of callee-save records for this function. Walks the CSI
/// list (which arrives sorted by frame index), greedily pairs adjacent
/// registers of the same class (GPR64 or FPR64) so each pair can be handled
/// by one STP/LDP, and assigns every record its STP/LDP-scaled offset within
/// the callee-save area. \p NeedShadowCallStackProlog is set to true when LR
/// is saved and the function carries the shadowcallstack attribute, telling
/// the caller to emit the x18-based shadow call stack prolog.
static void computeCalleeSaveRegisterPairs(
    MachineFunction &MF, const std::vector<CalleeSavedInfo> &CSI,
    const TargetRegisterInfo *TRI, SmallVectorImpl<RegPairInfo> &RegPairs,
    bool &NeedShadowCallStackProlog) {

  if (CSI.empty())
    return;

  AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>();
  MachineFrameInfo &MFI = MF.getFrameInfo();
  CallingConv::ID CC = MF.getFunction().getCallingConv();
  unsigned Count = CSI.size();
  (void)CC; // Only referenced inside asserts; silence -Wunused in NDEBUG.
  // MachO's compact unwind format relies on all registers being stored in
  // pairs.
  assert((!produceCompactUnwindFrame(MF) ||
          CC == CallingConv::PreserveMost ||
          (Count & 1) == 0) &&
         "Odd number of callee-saved regs to spill!");
  // Offsets are computed top-down, starting at the size of the callee-save
  // area and decreasing towards zero as registers are assigned slots.
  int Offset = AFI->getCalleeSavedStackSize();

  for (unsigned i = 0; i < Count; ++i) {
    RegPairInfo RPI;
    RPI.Reg1 = CSI[i].getReg();

    assert(AArch64::GPR64RegClass.contains(RPI.Reg1) ||
           AArch64::FPR64RegClass.contains(RPI.Reg1));
    RPI.IsGPR = AArch64::GPR64RegClass.contains(RPI.Reg1);

    // Add the next reg to the pair if it is in the same register class.
    if (i + 1 < Count) {
      unsigned NextReg = CSI[i + 1].getReg();
      if ((RPI.IsGPR && AArch64::GPR64RegClass.contains(NextReg)) ||
          (!RPI.IsGPR && AArch64::FPR64RegClass.contains(NextReg)))
        RPI.Reg2 = NextReg;
    }

    // If either of the registers to be saved is the lr register, it means that
    // we also need to save lr in the shadow call stack.
    if ((RPI.Reg1 == AArch64::LR || RPI.Reg2 == AArch64::LR) &&
        MF.getFunction().hasFnAttribute(Attribute::ShadowCallStack)) {
      // The shadow call stack keeps its pointer in x18, so x18 must be
      // reserved for the duration of the function.
      if (!MF.getSubtarget<AArch64Subtarget>().isXRegisterReserved(18))
        report_fatal_error("Must reserve x18 to use shadow call stack");
      NeedShadowCallStackProlog = true;
    }

    // GPRs and FPRs are saved in pairs of 64-bit regs. We expect the CSI
    // list to come in sorted by frame index so that we can issue the store
    // pair instructions directly. Assert if we see anything otherwise.
    //
    // The order of the registers in the list is controlled by
    // getCalleeSavedRegs(), so they will always be in-order, as well.
    assert((!RPI.isPaired() ||
            (CSI[i].getFrameIdx() + 1 == CSI[i + 1].getFrameIdx())) &&
           "Out of order callee saved regs!");

    // MachO's compact unwind format relies on all registers being stored in
    // adjacent register pairs.
    assert((!produceCompactUnwindFrame(MF) ||
            CC == CallingConv::PreserveMost ||
            (RPI.isPaired() &&
             ((RPI.Reg1 == AArch64::LR && RPI.Reg2 == AArch64::FP) ||
              RPI.Reg1 + 1 == RPI.Reg2))) &&
           "Callee-save registers not saved as adjacent register pair!");

    RPI.FrameIdx = CSI[i].getFrameIdx();

    if (Count * 8 != AFI->getCalleeSavedStackSize() && !RPI.isPaired()) {
      // Round up size of non-pair to pair size if we need to pad the
      // callee-save area to ensure 16-byte alignment.
      Offset -= 16;
      assert(MFI.getObjectAlignment(RPI.FrameIdx) <= 16);
      MFI.setObjectAlignment(RPI.FrameIdx, 16);
      // Record that the padding byte range is available for other spills.
      AFI->setCalleeSaveStackHasFreeSpace(true);
    } else
      Offset -= RPI.isPaired() ? 16 : 8;
    assert(Offset % 8 == 0);
    // Store the offset in the STP/LDP immediate scale (units of 8 bytes).
    RPI.Offset = Offset / 8;
    assert((RPI.Offset >= -64 && RPI.Offset <= 63) &&
           "Offset out of bounds for LDP/STP immediate");

    RegPairs.push_back(RPI);
    // A pair consumed two CSI entries; skip the second one.
    if (RPI.isPaired())
      ++i;
  }
}
|  | 1301 |  | 
/// Emit the callee-save spill sequence at \p MI in \p MBB: one STP (or STR
/// for an unpaired register) per RegPairInfo record, plus the shadow call
/// stack prolog when required. Returns true to indicate the spilling was
/// handled here rather than by generic code.
bool AArch64FrameLowering::spillCalleeSavedRegisters(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
    const std::vector<CalleeSavedInfo> &CSI,
    const TargetRegisterInfo *TRI) const {
  MachineFunction &MF = *MBB.getParent();
  const TargetInstrInfo &TII = *MF.getSubtarget().getInstrInfo();
  DebugLoc DL;
  SmallVector<RegPairInfo, 8> RegPairs;

  bool NeedShadowCallStackProlog = false;
  computeCalleeSaveRegisterPairs(MF, CSI, TRI, RegPairs,
                                 NeedShadowCallStackProlog);
  const MachineRegisterInfo &MRI = MF.getRegInfo();

  if (NeedShadowCallStackProlog) {
    // Shadow call stack prolog: str x30, [x18], #8
    BuildMI(MBB, MI, DL, TII.get(AArch64::STRXpost))
        .addReg(AArch64::X18, RegState::Define)
        .addReg(AArch64::LR)
        .addReg(AArch64::X18)
        .addImm(8)
        .setMIFlag(MachineInstr::FrameSetup);

    // This instruction also makes x18 live-in to the entry block.
    MBB.addLiveIn(AArch64::X18);
  }

  // Iterate the pair list in reverse so the lowest-offset slot is stored
  // first (see the rationale comment below).
  for (auto RPII = RegPairs.rbegin(), RPIE = RegPairs.rend(); RPII != RPIE;
       ++RPII) {
    RegPairInfo RPI = *RPII;
    unsigned Reg1 = RPI.Reg1;
    unsigned Reg2 = RPI.Reg2;
    unsigned StrOpc;

    // Issue sequence of spills for cs regs.  The first spill may be converted
    // to a pre-decrement store later by emitPrologue if the callee-save stack
    // area allocation can't be combined with the local stack area allocation.
    // For example:
    //    stp     x22, x21, [sp, #0]     // addImm(+0)
    //    stp     x20, x19, [sp, #16]    // addImm(+2)
    //    stp     fp, lr, [sp, #32]      // addImm(+4)
    // Rationale: This sequence saves uop updates compared to a sequence of
    // pre-increment spills like stp xi,xj,[sp,#-16]!
    // Note: Similar rationale and sequence for restores in epilog.
    if (RPI.IsGPR)
      StrOpc = RPI.isPaired() ? AArch64::STPXi : AArch64::STRXui;
    else
      StrOpc = RPI.isPaired() ? AArch64::STPDi : AArch64::STRDui;
    LLVM_DEBUG(dbgs() << "CSR spill: (" << printReg(Reg1, TRI);
               if (RPI.isPaired()) dbgs() << ", " << printReg(Reg2, TRI);
               dbgs() << ") -> fi#(" << RPI.FrameIdx;
               if (RPI.isPaired()) dbgs() << ", " << RPI.FrameIdx + 1;
               dbgs() << ")\n");

    MachineInstrBuilder MIB = BuildMI(MBB, MI, DL, TII.get(StrOpc));
    // Spilled CSRs are live-in unless already reserved (reserved registers
    // must not appear in the live-in list).
    if (!MRI.isReserved(Reg1))
      MBB.addLiveIn(Reg1);
    if (RPI.isPaired()) {
      if (!MRI.isReserved(Reg2))
        MBB.addLiveIn(Reg2);
      // For STP, the second register of the pair is the FIRST source
      // operand, so it is added before Reg1 below.
      MIB.addReg(Reg2, getPrologueDeath(MF, Reg2));
      MIB.addMemOperand(MF.getMachineMemOperand(
          MachinePointerInfo::getFixedStack(MF, RPI.FrameIdx + 1),
          MachineMemOperand::MOStore, 8, 8));
    }
    MIB.addReg(Reg1, getPrologueDeath(MF, Reg1))
        .addReg(AArch64::SP)
        .addImm(RPI.Offset) // [sp, #offset*8], where factor*8 is implicit
        .setMIFlag(MachineInstr::FrameSetup);
    MIB.addMemOperand(MF.getMachineMemOperand(
        MachinePointerInfo::getFixedStack(MF, RPI.FrameIdx),
        MachineMemOperand::MOStore, 8, 8));
  }
  return true;
}
|  | 1377 |  | 
|  | 1378 | bool AArch64FrameLowering::restoreCalleeSavedRegisters( | 
|  | 1379 | MachineBasicBlock &MBB, MachineBasicBlock::iterator MI, | 
| Krzysztof Parzyszek | bea30c6 | 2017-08-10 16:17:32 +0000 | [diff] [blame] | 1380 | std::vector<CalleeSavedInfo> &CSI, | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1381 | const TargetRegisterInfo *TRI) const { | 
|  | 1382 | MachineFunction &MF = *MBB.getParent(); | 
| Eric Christopher | fc6de42 | 2014-08-05 02:39:49 +0000 | [diff] [blame] | 1383 | const TargetInstrInfo &TII = *MF.getSubtarget().getInstrInfo(); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1384 | DebugLoc DL; | 
| Geoff Berry | 29d4a69 | 2016-02-01 19:07:06 +0000 | [diff] [blame] | 1385 | SmallVector<RegPairInfo, 8> RegPairs; | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1386 |  | 
|  | 1387 | if (MI != MBB.end()) | 
|  | 1388 | DL = MI->getDebugLoc(); | 
|  | 1389 |  | 
| Peter Collingbourne | f11eb3e | 2018-04-04 21:55:44 +0000 | [diff] [blame] | 1390 | bool NeedShadowCallStackProlog = false; | 
|  | 1391 | computeCalleeSaveRegisterPairs(MF, CSI, TRI, RegPairs, | 
|  | 1392 | NeedShadowCallStackProlog); | 
| Geoff Berry | 29d4a69 | 2016-02-01 19:07:06 +0000 | [diff] [blame] | 1393 |  | 
| Francis Visoiu Mistrih | 164560b | 2018-03-14 20:34:03 +0000 | [diff] [blame] | 1394 | auto EmitMI = [&](const RegPairInfo &RPI) { | 
| Geoff Berry | 29d4a69 | 2016-02-01 19:07:06 +0000 | [diff] [blame] | 1395 | unsigned Reg1 = RPI.Reg1; | 
|  | 1396 | unsigned Reg2 = RPI.Reg2; | 
|  | 1397 |  | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 1398 | // Issue sequence of restores for cs regs. The last restore may be converted | 
|  | 1399 | // to a post-increment load later by emitEpilogue if the callee-save stack | 
|  | 1400 | // area allocation can't be combined with the local stack area allocation. | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1401 | // For example: | 
|  | 1402 | //    ldp     fp, lr, [sp, #32]       // addImm(+4) | 
|  | 1403 | //    ldp     x20, x19, [sp, #16]     // addImm(+2) | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 1404 | //    ldp     x22, x21, [sp, #0]      // addImm(+0) | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1405 | // Note: see comment in spillCalleeSavedRegisters() | 
|  | 1406 | unsigned LdrOpc; | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 1407 | if (RPI.IsGPR) | 
|  | 1408 | LdrOpc = RPI.isPaired() ? AArch64::LDPXi : AArch64::LDRXui; | 
|  | 1409 | else | 
|  | 1410 | LdrOpc = RPI.isPaired() ? AArch64::LDPDi : AArch64::LDRDui; | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1411 | LLVM_DEBUG(dbgs() << "CSR restore: (" << printReg(Reg1, TRI); | 
|  | 1412 | if (RPI.isPaired()) dbgs() << ", " << printReg(Reg2, TRI); | 
|  | 1413 | dbgs() << ") -> fi#(" << RPI.FrameIdx; | 
|  | 1414 | if (RPI.isPaired()) dbgs() << ", " << RPI.FrameIdx + 1; | 
|  | 1415 | dbgs() << ")\n"); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1416 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1417 | MachineInstrBuilder MIB = BuildMI(MBB, MI, DL, TII.get(LdrOpc)); | 
| Geoff Berry | c376406 | 2016-04-15 15:16:19 +0000 | [diff] [blame] | 1418 | if (RPI.isPaired()) { | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 1419 | MIB.addReg(Reg2, getDefRegState(true)); | 
| Geoff Berry | c376406 | 2016-04-15 15:16:19 +0000 | [diff] [blame] | 1420 | MIB.addMemOperand(MF.getMachineMemOperand( | 
|  | 1421 | MachinePointerInfo::getFixedStack(MF, RPI.FrameIdx + 1), | 
|  | 1422 | MachineMemOperand::MOLoad, 8, 8)); | 
| Geoff Berry | c376406 | 2016-04-15 15:16:19 +0000 | [diff] [blame] | 1423 | } | 
| Geoff Berry | a533564 | 2016-05-06 16:34:59 +0000 | [diff] [blame] | 1424 | MIB.addReg(Reg1, getDefRegState(true)) | 
|  | 1425 | .addReg(AArch64::SP) | 
|  | 1426 | .addImm(RPI.Offset) // [sp, #offset*8] where the factor*8 is implicit | 
|  | 1427 | .setMIFlag(MachineInstr::FrameDestroy); | 
| Geoff Berry | c376406 | 2016-04-15 15:16:19 +0000 | [diff] [blame] | 1428 | MIB.addMemOperand(MF.getMachineMemOperand( | 
|  | 1429 | MachinePointerInfo::getFixedStack(MF, RPI.FrameIdx), | 
|  | 1430 | MachineMemOperand::MOLoad, 8, 8)); | 
| Francis Visoiu Mistrih | 164560b | 2018-03-14 20:34:03 +0000 | [diff] [blame] | 1431 | }; | 
|  | 1432 |  | 
|  | 1433 | if (ReverseCSRRestoreSeq) | 
|  | 1434 | for (const RegPairInfo &RPI : reverse(RegPairs)) | 
|  | 1435 | EmitMI(RPI); | 
|  | 1436 | else | 
|  | 1437 | for (const RegPairInfo &RPI : RegPairs) | 
|  | 1438 | EmitMI(RPI); | 
| Peter Collingbourne | f11eb3e | 2018-04-04 21:55:44 +0000 | [diff] [blame] | 1439 |  | 
|  | 1440 | if (NeedShadowCallStackProlog) { | 
|  | 1441 | // Shadow call stack epilog: ldr x30, [x18, #-8]! | 
|  | 1442 | BuildMI(MBB, MI, DL, TII.get(AArch64::LDRXpre)) | 
|  | 1443 | .addReg(AArch64::X18, RegState::Define) | 
|  | 1444 | .addReg(AArch64::LR, RegState::Define) | 
|  | 1445 | .addReg(AArch64::X18) | 
|  | 1446 | .addImm(-8) | 
|  | 1447 | .setMIFlag(MachineInstr::FrameDestroy); | 
|  | 1448 | } | 
|  | 1449 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1450 | return true; | 
|  | 1451 | } | 
|  | 1452 |  | 
| Matthias Braun | 0256486 | 2015-07-14 17:17:13 +0000 | [diff] [blame] | 1453 | void AArch64FrameLowering::determineCalleeSaves(MachineFunction &MF, | 
|  | 1454 | BitVector &SavedRegs, | 
|  | 1455 | RegScavenger *RS) const { | 
|  | 1456 | // All calls are tail calls in GHC calling conv, and functions have no | 
|  | 1457 | // prologue/epilogue. | 
| Matthias Braun | f1caa28 | 2017-12-15 22:22:58 +0000 | [diff] [blame] | 1458 | if (MF.getFunction().getCallingConv() == CallingConv::GHC) | 
| Matthias Braun | 0256486 | 2015-07-14 17:17:13 +0000 | [diff] [blame] | 1459 | return; | 
|  | 1460 |  | 
|  | 1461 | TargetFrameLowering::determineCalleeSaves(MF, SavedRegs, RS); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1462 | const AArch64RegisterInfo *RegInfo = static_cast<const AArch64RegisterInfo *>( | 
| Eric Christopher | fc6de42 | 2014-08-05 02:39:49 +0000 | [diff] [blame] | 1463 | MF.getSubtarget().getRegisterInfo()); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1464 | AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>(); | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1465 | unsigned UnspilledCSGPR = AArch64::NoRegister; | 
|  | 1466 | unsigned UnspilledCSGPRPaired = AArch64::NoRegister; | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1467 |  | 
| Martin Storsjo | 2778fd0 | 2017-12-20 06:51:45 +0000 | [diff] [blame] | 1468 | MachineFrameInfo &MFI = MF.getFrameInfo(); | 
|  | 1469 | const MCPhysReg *CSRegs = RegInfo->getCalleeSavedRegs(&MF); | 
|  | 1470 |  | 
|  | 1471 | unsigned BasePointerReg = RegInfo->hasBasePointer(MF) | 
|  | 1472 | ? RegInfo->getBaseRegister() | 
|  | 1473 | : (unsigned)AArch64::NoRegister; | 
|  | 1474 |  | 
|  | 1475 | unsigned SpillEstimate = SavedRegs.count(); | 
|  | 1476 | for (unsigned i = 0; CSRegs[i]; ++i) { | 
|  | 1477 | unsigned Reg = CSRegs[i]; | 
|  | 1478 | unsigned PairedReg = CSRegs[i ^ 1]; | 
|  | 1479 | if (Reg == BasePointerReg) | 
|  | 1480 | SpillEstimate++; | 
|  | 1481 | if (produceCompactUnwindFrame(MF) && !SavedRegs.test(PairedReg)) | 
|  | 1482 | SpillEstimate++; | 
|  | 1483 | } | 
|  | 1484 | SpillEstimate += 2; // Conservatively include FP+LR in the estimate | 
|  | 1485 | unsigned StackEstimate = MFI.estimateStackSize(MF) + 8 * SpillEstimate; | 
|  | 1486 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1487 | // The frame record needs to be created by saving the appropriate registers | 
| Martin Storsjo | 2778fd0 | 2017-12-20 06:51:45 +0000 | [diff] [blame] | 1488 | if (hasFP(MF) || windowsRequiresStackProbe(MF, StackEstimate)) { | 
| Matthias Braun | 0256486 | 2015-07-14 17:17:13 +0000 | [diff] [blame] | 1489 | SavedRegs.set(AArch64::FP); | 
|  | 1490 | SavedRegs.set(AArch64::LR); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1491 | } | 
|  | 1492 |  | 
| Matthias Braun | d78597e | 2017-04-21 22:42:08 +0000 | [diff] [blame] | 1493 | unsigned ExtraCSSpill = 0; | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1494 | // Figure out which callee-saved registers to save/restore. | 
|  | 1495 | for (unsigned i = 0; CSRegs[i]; ++i) { | 
|  | 1496 | const unsigned Reg = CSRegs[i]; | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1497 |  | 
| Geoff Berry | 7e4ba3d | 2016-02-19 18:27:32 +0000 | [diff] [blame] | 1498 | // Add the base pointer register to SavedRegs if it is callee-save. | 
|  | 1499 | if (Reg == BasePointerReg) | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1500 | SavedRegs.set(Reg); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1501 |  | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1502 | bool RegUsed = SavedRegs.test(Reg); | 
|  | 1503 | unsigned PairedReg = CSRegs[i ^ 1]; | 
|  | 1504 | if (!RegUsed) { | 
|  | 1505 | if (AArch64::GPR64RegClass.contains(Reg) && | 
|  | 1506 | !RegInfo->isReservedReg(MF, Reg)) { | 
|  | 1507 | UnspilledCSGPR = Reg; | 
|  | 1508 | UnspilledCSGPRPaired = PairedReg; | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1509 | } | 
|  | 1510 | continue; | 
|  | 1511 | } | 
|  | 1512 |  | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1513 | // MachO's compact unwind format relies on all registers being stored in | 
|  | 1514 | // pairs. | 
|  | 1515 | // FIXME: the usual format is actually better if unwinding isn't needed. | 
| Manman Ren | 5751814 | 2016-04-11 21:08:06 +0000 | [diff] [blame] | 1516 | if (produceCompactUnwindFrame(MF) && !SavedRegs.test(PairedReg)) { | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1517 | SavedRegs.set(PairedReg); | 
| Geoff Berry | 74cb718 | 2016-05-16 20:52:28 +0000 | [diff] [blame] | 1518 | if (AArch64::GPR64RegClass.contains(PairedReg) && | 
|  | 1519 | !RegInfo->isReservedReg(MF, PairedReg)) | 
| Matthias Braun | d78597e | 2017-04-21 22:42:08 +0000 | [diff] [blame] | 1520 | ExtraCSSpill = PairedReg; | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1521 | } | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1522 | } | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1523 |  | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1524 | LLVM_DEBUG(dbgs() << "*** determineCalleeSaves\nUsed CSRs:"; | 
|  | 1525 | for (unsigned Reg | 
|  | 1526 | : SavedRegs.set_bits()) dbgs() | 
|  | 1527 | << ' ' << printReg(Reg, RegInfo); | 
|  | 1528 | dbgs() << "\n";); | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1529 |  | 
|  | 1530 | // If any callee-saved registers are used, the frame cannot be eliminated. | 
|  | 1531 | unsigned NumRegsSpilled = SavedRegs.count(); | 
|  | 1532 | bool CanEliminateFrame = NumRegsSpilled == 0; | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1533 |  | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1534 | // The CSR spill slots have not been allocated yet, so estimateStackSize | 
|  | 1535 | // won't include them. | 
| Matthias Braun | 941a705 | 2016-07-28 18:40:00 +0000 | [diff] [blame] | 1536 | unsigned CFSize = MFI.estimateStackSize(MF) + 8 * NumRegsSpilled; | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1537 | LLVM_DEBUG(dbgs() << "Estimated stack frame size: " << CFSize << " bytes.\n"); | 
| Kristof Beyls | 2af1e90 | 2017-05-30 06:58:41 +0000 | [diff] [blame] | 1538 | unsigned EstimatedStackSizeLimit = estimateRSStackSizeLimit(MF); | 
|  | 1539 | bool BigStack = (CFSize > EstimatedStackSizeLimit); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1540 | if (BigStack || !CanEliminateFrame || RegInfo->cannotEliminateFrame(MF)) | 
|  | 1541 | AFI->setHasStackFrame(true); | 
|  | 1542 |  | 
|  | 1543 | // Estimate if we might need to scavenge a register at some point in order | 
|  | 1544 | // to materialize a stack offset. If so, either spill one additional | 
|  | 1545 | // callee-saved register or reserve a special spill slot to facilitate | 
|  | 1546 | // register scavenging. If we already spilled an extra callee-saved register | 
|  | 1547 | // above to keep the number of spills even, we don't need to do anything else | 
|  | 1548 | // here. | 
| Matthias Braun | d78597e | 2017-04-21 22:42:08 +0000 | [diff] [blame] | 1549 | if (BigStack) { | 
|  | 1550 | if (!ExtraCSSpill && UnspilledCSGPR != AArch64::NoRegister) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1551 | LLVM_DEBUG(dbgs() << "Spilling " << printReg(UnspilledCSGPR, RegInfo) | 
|  | 1552 | << " to get a scratch register.\n"); | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1553 | SavedRegs.set(UnspilledCSGPR); | 
|  | 1554 | // MachO's compact unwind format relies on all registers being stored in | 
|  | 1555 | // pairs, so if we need to spill one extra for BigStack, then we need to | 
|  | 1556 | // store the pair. | 
| Manman Ren | 5751814 | 2016-04-11 21:08:06 +0000 | [diff] [blame] | 1557 | if (produceCompactUnwindFrame(MF)) | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1558 | SavedRegs.set(UnspilledCSGPRPaired); | 
| Matthias Braun | d78597e | 2017-04-21 22:42:08 +0000 | [diff] [blame] | 1559 | ExtraCSSpill = UnspilledCSGPRPaired; | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1560 | NumRegsSpilled = SavedRegs.count(); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1561 | } | 
|  | 1562 |  | 
|  | 1563 | // If we didn't find an extra callee-saved register to spill, create | 
|  | 1564 | // an emergency spill slot. | 
| Matthias Braun | d78597e | 2017-04-21 22:42:08 +0000 | [diff] [blame] | 1565 | if (!ExtraCSSpill || MF.getRegInfo().isPhysRegUsed(ExtraCSSpill)) { | 
| Krzysztof Parzyszek | 44e25f3 | 2017-04-24 18:55:33 +0000 | [diff] [blame] | 1566 | const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo(); | 
|  | 1567 | const TargetRegisterClass &RC = AArch64::GPR64RegClass; | 
|  | 1568 | unsigned Size = TRI->getSpillSize(RC); | 
|  | 1569 | unsigned Align = TRI->getSpillAlignment(RC); | 
|  | 1570 | int FI = MFI.CreateStackObject(Size, Align, false); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1571 | RS->addScavengingFrameIndex(FI); | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1572 | LLVM_DEBUG(dbgs() << "No available CS registers, allocated fi#" << FI | 
|  | 1573 | << " as the emergency spill slot.\n"); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1574 | } | 
|  | 1575 | } | 
| Geoff Berry | 04bf91a | 2016-02-01 16:29:19 +0000 | [diff] [blame] | 1576 |  | 
| Geoff Berry | c25d3bd | 2016-02-12 16:31:41 +0000 | [diff] [blame] | 1577 | // Round up to register pair alignment to avoid additional SP adjustment | 
|  | 1578 | // instructions. | 
|  | 1579 | AFI->setCalleeSavedStackSize(alignTo(8 * NumRegsSpilled, 16)); | 
| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1580 | } | 
| Geoff Berry | 66f6b65 | 2016-06-02 16:22:07 +0000 | [diff] [blame] | 1581 |  | 
|  | 1582 | bool AArch64FrameLowering::enableStackSlotScavenging( | 
|  | 1583 | const MachineFunction &MF) const { | 
|  | 1584 | const AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>(); | 
|  | 1585 | return AFI->hasCalleeSaveStackFreeSpace(); | 
|  | 1586 | } |