//===----- X86CallFrameOptimization.cpp - Optimize x86 call sequences -----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines a pass that optimizes call sequences on x86.
// Currently, it converts movs of function parameters onto the stack into
// pushes. This is beneficial for two main reasons:
// 1) The push instruction encoding is much smaller than a stack-ptr-based mov.
// 2) It is possible to push memory arguments directly. So, if the
//    transformation is performed pre-reg-alloc, it can help relieve
//    register pressure.
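//
// As an illustrative sketch (the operand values and callee name are made up),
// a 32-bit call sequence such as:
//   movl $42, (%esp)
//   movl %eax, 4(%esp)
//   calll foo
// can instead be emitted as:
//   pushl %eax
//   pushl $42
//   calll foo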
//
//===----------------------------------------------------------------------===//

#include "MCTargetDesc/X86BaseInfo.h"
#include "X86FrameLowering.h"
#include "X86InstrInfo.h"
#include "X86MachineFunctionInfo.h"
#include "X86RegisterInfo.h"
#include "X86Subtarget.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/Function.h"
#include "llvm/MC/MCDwarf.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iterator>

using namespace llvm;

#define DEBUG_TYPE "x86-cf-opt"

static cl::opt<bool>
    NoX86CFOpt("no-x86-call-frame-opt",
               cl::desc("Avoid optimizing x86 call frames for size"),
               cl::init(false), cl::Hidden);

namespace {

class X86CallFrameOptimization : public MachineFunctionPass {
public:
  X86CallFrameOptimization() : MachineFunctionPass(ID) {}

  bool runOnMachineFunction(MachineFunction &MF) override;

  static char ID;

private:
  // Information we know about a particular call site
  struct CallContext {
    CallContext() : FrameSetup(nullptr), ArgStoreVector(4, nullptr) {}

    // Iterator referring to the frame setup instruction
    MachineBasicBlock::iterator FrameSetup;

    // Actual call instruction
    MachineInstr *Call = nullptr;

    // A copy of the stack pointer
    MachineInstr *SPCopy = nullptr;

    // The total displacement of all passed parameters
    int64_t ExpectedDist = 0;

    // The sequence of storing instructions used to pass the parameters
    SmallVector<MachineInstr *, 4> ArgStoreVector;

    // True if this call site has no stack parameters
    bool NoStackParams = false;

    // True if this call site can use push instructions
    bool UsePush = false;
  };

  typedef SmallVector<CallContext, 8> ContextVector;

  bool isLegal(MachineFunction &MF);

  bool isProfitable(MachineFunction &MF, ContextVector &CallSeqVector);

  void collectCallInfo(MachineFunction &MF, MachineBasicBlock &MBB,
                       MachineBasicBlock::iterator I, CallContext &Context);

  void adjustCallSequence(MachineFunction &MF, const CallContext &Context);

  MachineInstr *canFoldIntoRegPush(MachineBasicBlock::iterator FrameSetup,
                                   unsigned Reg);

  enum InstClassification { Convert, Skip, Exit };

  InstClassification classifyInstruction(MachineBasicBlock &MBB,
                                         MachineBasicBlock::iterator MI,
                                         const X86RegisterInfo &RegInfo,
                                         DenseSet<unsigned int> &UsedRegs);

  StringRef getPassName() const override { return "X86 Optimize Call Frame"; }

  const X86InstrInfo *TII;
  const X86FrameLowering *TFL;
  const X86Subtarget *STI;
  MachineRegisterInfo *MRI;
  unsigned SlotSize;
  unsigned Log2SlotSize;
};

} // end anonymous namespace

char X86CallFrameOptimization::ID = 0;
INITIALIZE_PASS(X86CallFrameOptimization, DEBUG_TYPE,
                "X86 Call Frame Optimization", false, false)

// This checks whether the transformation is legal.
// Also returns false in cases where it's potentially legal, but
// we don't even want to try.
bool X86CallFrameOptimization::isLegal(MachineFunction &MF) {
  if (NoX86CFOpt.getValue())
    return false;

  // We can't encode multiple DW_CFA_GNU_args_size or DW_CFA_def_cfa_offset
  // in the compact unwind encoding that Darwin uses. So, bail if there
  // is a danger of that being generated.
  if (STI->isTargetDarwin() &&
      (!MF.getLandingPads().empty() ||
       (MF.getFunction().needsUnwindTableEntry() && !TFL->hasFP(MF))))
    return false;

  // It is not valid to change the stack pointer outside the prolog/epilog
  // on 64-bit Windows.
  if (STI->isTargetWin64())
    return false;

  // You would expect straight-line code between call-frame setup and
  // call-frame destroy. You would be wrong. There are circumstances (e.g.
  // CMOV_GR8 expansion of a select that feeds a function call!) where we can
  // end up with the setup and the destroy in different basic blocks.
  // This is bad, and breaks SP adjustment.
  // So, check that all of the frames in the function are closed inside
  // the same block, and, for good measure, that there are no nested frames.
  unsigned FrameSetupOpcode = TII->getCallFrameSetupOpcode();
  unsigned FrameDestroyOpcode = TII->getCallFrameDestroyOpcode();
  for (MachineBasicBlock &BB : MF) {
    bool InsideFrameSequence = false;
    for (MachineInstr &MI : BB) {
      if (MI.getOpcode() == FrameSetupOpcode) {
        if (InsideFrameSequence)
          return false;
        InsideFrameSequence = true;
      } else if (MI.getOpcode() == FrameDestroyOpcode) {
        if (!InsideFrameSequence)
          return false;
        InsideFrameSequence = false;
      }
    }

    if (InsideFrameSequence)
      return false;
  }

  return true;
}

// Check whether this transformation is profitable for a particular
// function - in terms of code size.
bool X86CallFrameOptimization::isProfitable(MachineFunction &MF,
                                            ContextVector &CallSeqVector) {
  // This transformation is always a win when we do not expect to have
  // a reserved call frame. Under other circumstances, it may be either
  // a win or a loss, and requires a heuristic.
  bool CannotReserveFrame = MF.getFrameInfo().hasVarSizedObjects();
  if (CannotReserveFrame)
    return true;

  unsigned StackAlign = TFL->getStackAlignment();

  int64_t Advantage = 0;
  for (auto CC : CallSeqVector) {
    // Call sites where no parameters are passed on the stack
    // do not affect the cost, since there needs to be no
    // stack adjustment.
    if (CC.NoStackParams)
      continue;

    if (!CC.UsePush) {
      // If we don't use pushes for a particular call site,
      // we pay for not having a reserved call frame with an
      // additional sub/add esp pair. The cost is ~3 bytes per instruction,
      // depending on the size of the constant.
      // TODO: Callee-pop functions should have a smaller penalty, because
      // an add is needed even with a reserved call frame.
      Advantage -= 6;
    } else {
      // We can use pushes. First, account for the fixed costs.
      // We'll need an add after the call.
      Advantage -= 3;
      // If we have to realign the stack, we'll also need a sub before the
      // call.
      if (CC.ExpectedDist % StackAlign)
        Advantage -= 3;
      // Now, for each push, we save ~3 bytes. For small constants, we actually
      // save more (up to 5 bytes), but 3 should be a good approximation.
      Advantage += (CC.ExpectedDist >> Log2SlotSize) * 3;
    }
  }
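  // A rough worked example of the heuristic above (illustrative numbers only):
  // a call site passing four slot-sized arguments, with ExpectedDist equal to
  // 4 * SlotSize and a multiple of the stack alignment, pays ~3 bytes for the
  // add after the call and saves ~3 bytes per push, so its contribution is
  // about -3 + 4 * 3 = +9 and the transformation is considered a win.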

  return Advantage >= 0;
}

bool X86CallFrameOptimization::runOnMachineFunction(MachineFunction &MF) {
  STI = &MF.getSubtarget<X86Subtarget>();
  TII = STI->getInstrInfo();
  TFL = STI->getFrameLowering();
  MRI = &MF.getRegInfo();

  const X86RegisterInfo &RegInfo =
      *static_cast<const X86RegisterInfo *>(STI->getRegisterInfo());
  SlotSize = RegInfo.getSlotSize();
  assert(isPowerOf2_32(SlotSize) && "Expect power of 2 stack slot size");
  Log2SlotSize = Log2_32(SlotSize);

  if (skipFunction(MF.getFunction()) || !isLegal(MF))
    return false;

  unsigned FrameSetupOpcode = TII->getCallFrameSetupOpcode();

  bool Changed = false;

  ContextVector CallSeqVector;

  for (auto &MBB : MF)
    for (auto &MI : MBB)
      if (MI.getOpcode() == FrameSetupOpcode) {
        CallContext Context;
        collectCallInfo(MF, MBB, MI, Context);
        CallSeqVector.push_back(Context);
      }

  if (!isProfitable(MF, CallSeqVector))
    return false;

  for (auto CC : CallSeqVector) {
    if (CC.UsePush) {
      adjustCallSequence(MF, CC);
      Changed = true;
    }
  }

  return Changed;
}

X86CallFrameOptimization::InstClassification
X86CallFrameOptimization::classifyInstruction(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
    const X86RegisterInfo &RegInfo, DenseSet<unsigned int> &UsedRegs) {
  if (MI == MBB.end())
    return Exit;

  // The instructions we actually care about are movs onto the stack or special
  // cases of constant-stores to stack
  switch (MI->getOpcode()) {
  case X86::AND16mi8:
  case X86::AND32mi8:
  case X86::AND64mi8: {
    MachineOperand ImmOp = MI->getOperand(X86::AddrNumOperands);
    return ImmOp.getImm() == 0 ? Convert : Exit;
  }
  case X86::OR16mi8:
  case X86::OR32mi8:
  case X86::OR64mi8: {
    MachineOperand ImmOp = MI->getOperand(X86::AddrNumOperands);
    return ImmOp.getImm() == -1 ? Convert : Exit;
  }
  case X86::MOV32mi:
  case X86::MOV32mr:
  case X86::MOV64mi32:
  case X86::MOV64mr:
    return Convert;
  }

  // Not all calling conventions have only stack MOVs between the stack
  // adjust and the call.

  // We want to tolerate other instructions, to cover more cases.
  // In particular:
  // a) PCrel calls, where we expect an additional COPY of the basereg.
  // b) Passing frame-index addresses.
  // c) Calling conventions that have inreg parameters. These generate
  //    both copies and movs into registers.
  // To avoid creating lots of special cases, allow any instruction
  // that does not write into memory, does not def or use the stack
  // pointer, and does not def any register that was used by a preceding
  // push.
  // (Reading from memory is allowed, even if referenced through a
  // frame index, since these will get adjusted properly in PEI)

  // The reason for the last condition is that the pushes can't replace
  // the movs in place, because the order must be reversed.
  // So if we have a MOV32mr that uses EDX, then an instruction that defs
  // EDX, and then the call, after the transformation the push will use
  // the modified version of EDX, and not the original one.
  // Since we are still in SSA form at this point, we only need to
  // make sure we don't clobber any *physical* registers that were
  // used by an earlier mov that will become a push.
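  // For instance (a schematic example, not taken from real compiler output):
  //   movl %edx, 4(%esp)    # would become "pushl %edx"
  //   movl $0, %edx         # defs EDX
  //   calll foo
  // Folding the first store into a push would place the push after the
  // redefinition of EDX, so an instruction like the second one is classified
  // as Exit rather than Skip.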

  if (MI->isCall() || MI->mayStore())
    return Exit;

  for (const MachineOperand &MO : MI->operands()) {
    if (!MO.isReg())
      continue;
    unsigned int Reg = MO.getReg();
    if (!RegInfo.isPhysicalRegister(Reg))
      continue;
    if (RegInfo.regsOverlap(Reg, RegInfo.getStackRegister()))
      return Exit;
    if (MO.isDef()) {
      for (unsigned int U : UsedRegs)
        if (RegInfo.regsOverlap(Reg, U))
          return Exit;
    }
  }

  return Skip;
}

void X86CallFrameOptimization::collectCallInfo(MachineFunction &MF,
                                               MachineBasicBlock &MBB,
                                               MachineBasicBlock::iterator I,
                                               CallContext &Context) {
  // Check that this particular call sequence is amenable to the
  // transformation.
  const X86RegisterInfo &RegInfo =
      *static_cast<const X86RegisterInfo *>(STI->getRegisterInfo());

  // We expect to enter this at the beginning of a call sequence
  assert(I->getOpcode() == TII->getCallFrameSetupOpcode());
  MachineBasicBlock::iterator FrameSetup = I++;
  Context.FrameSetup = FrameSetup;

  // How much do we adjust the stack? This puts an upper bound on
  // the number of parameters actually passed on it.
  unsigned int MaxAdjust = TII->getFrameSize(*FrameSetup) >> Log2SlotSize;

  // A zero adjustment means no stack parameters
  if (!MaxAdjust) {
    Context.NoStackParams = true;
    return;
  }

  // Skip over DEBUG_VALUE.
  // For globals in PIC mode, we can have some LEAs here. Skip them as well.
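  // (An illustrative, hypothetical example of the latter in 32-bit PIC code
  // would be an address computation such as "leal var@GOTOFF(%ebx), %eax"
  // feeding one of the argument stores.)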
  // TODO: Extend this to something that covers more cases.
  while (I->getOpcode() == X86::LEA32r || I->isDebugInstr())
    ++I;

  unsigned StackPtr = RegInfo.getStackRegister();
  auto StackPtrCopyInst = MBB.end();
  // SelectionDAG (but not FastISel) inserts a copy of ESP into a virtual
  // register. If it's there, use that virtual register as stack pointer
  // instead. Also, we need to locate this instruction so that we can later
  // safely ignore it while doing the conservative processing of the call
  // chain. The COPY can be located anywhere between the call-frame setup
  // instruction and its first use. We use the call instruction as a boundary
  // because it is usually cheaper to check if an instruction is a call than
  // checking if an instruction uses a register.
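  // (In MIR this pattern looks roughly like "%0:gr32 = COPY $esp" followed by
  // argument stores such as "MOV32mr %0, 1, $noreg, 0, $noreg, ..."; shown
  // here only as an approximation of what is being matched.)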
  for (auto J = I; !J->isCall(); ++J)
    if (J->isCopy() && J->getOperand(0).isReg() && J->getOperand(1).isReg() &&
        J->getOperand(1).getReg() == StackPtr) {
      StackPtrCopyInst = J;
      Context.SPCopy = &*J++;
      StackPtr = Context.SPCopy->getOperand(0).getReg();
      break;
    }

  // Scan the call setup sequence for the pattern we're looking for.
  // We only handle a simple case - a sequence of store instructions that
  // push a sequence of stack-slot-aligned values onto the stack, with
  // no gaps between them.
  if (MaxAdjust > 4)
    Context.ArgStoreVector.resize(MaxAdjust, nullptr);

  DenseSet<unsigned int> UsedRegs;

  for (InstClassification Classification = Skip; Classification != Exit; ++I) {
    // If this is the COPY of the stack pointer, it's ok to ignore.
    if (I == StackPtrCopyInst)
      continue;
    Classification = classifyInstruction(MBB, I, RegInfo, UsedRegs);
    if (Classification != Convert)
      continue;
    // We know the instruction has a supported store opcode.
    // We only want movs of the form:
    // mov imm/reg, k(%StackPtr)
    // If we run into something else, bail.
    // Note that AddrBaseReg may, counter to its name, not be a register,
    // but rather a frame index.
    // TODO: Support the fi case. This should probably work now that we
    // have the infrastructure to track the stack pointer within a call
    // sequence.
    if (!I->getOperand(X86::AddrBaseReg).isReg() ||
        (I->getOperand(X86::AddrBaseReg).getReg() != StackPtr) ||
        !I->getOperand(X86::AddrScaleAmt).isImm() ||
        (I->getOperand(X86::AddrScaleAmt).getImm() != 1) ||
        (I->getOperand(X86::AddrIndexReg).getReg() != X86::NoRegister) ||
        (I->getOperand(X86::AddrSegmentReg).getReg() != X86::NoRegister) ||
        !I->getOperand(X86::AddrDisp).isImm())
      return;

    int64_t StackDisp = I->getOperand(X86::AddrDisp).getImm();
    assert(StackDisp >= 0 &&
           "Negative stack displacement when passing parameters");

    // We really don't want to consider the unaligned case.
    if (StackDisp & (SlotSize - 1))
      return;
    StackDisp >>= Log2SlotSize;

    assert((size_t)StackDisp < Context.ArgStoreVector.size() &&
           "Function call has more parameters than the stack is adjusted for.");

    // If the same stack slot is being filled twice, something's fishy.
    if (Context.ArgStoreVector[StackDisp] != nullptr)
      return;
    Context.ArgStoreVector[StackDisp] = &*I;

    for (const MachineOperand &MO : I->uses()) {
      if (!MO.isReg())
        continue;
      unsigned int Reg = MO.getReg();
      if (RegInfo.isPhysicalRegister(Reg))
        UsedRegs.insert(Reg);
    }
  }

  --I;

  // We now expect the end of the sequence. If we stopped early,
  // or reached the end of the block without finding a call, bail.
  if (I == MBB.end() || !I->isCall())
    return;

  Context.Call = &*I;
  if ((++I)->getOpcode() != TII->getCallFrameDestroyOpcode())
    return;

  // Now, go through the vector, and see that we don't have any gaps,
  // but only a series of storing instructions.
  auto MMI = Context.ArgStoreVector.begin(), MME = Context.ArgStoreVector.end();
  for (; MMI != MME; ++MMI, Context.ExpectedDist += SlotSize)
    if (*MMI == nullptr)
      break;

  // If the call had no parameters, do nothing
  if (MMI == Context.ArgStoreVector.begin())
    return;

  // We are either at the last parameter, or a gap.
  // Make sure it's not a gap
  for (; MMI != MME; ++MMI)
    if (*MMI != nullptr)
      return;

  Context.UsePush = true;
}

void X86CallFrameOptimization::adjustCallSequence(MachineFunction &MF,
                                                  const CallContext &Context) {
  // Ok, we can in fact do the transformation for this call.
  // Do not remove the FrameSetup instruction, but adjust the parameters.
  // PEI will end up finalizing the handling of this.
  MachineBasicBlock::iterator FrameSetup = Context.FrameSetup;
  MachineBasicBlock &MBB = *(FrameSetup->getParent());
  TII->setFrameAdjustment(*FrameSetup, Context.ExpectedDist);

  DebugLoc DL = FrameSetup->getDebugLoc();
  bool Is64Bit = STI->is64Bit();
  // Now, iterate through the vector in reverse order, and replace the stores
  // to the stack with pushes. MOVmi/MOVmr doesn't have any defs, so no need
  // to replace uses.
  for (int Idx = (Context.ExpectedDist >> Log2SlotSize) - 1; Idx >= 0; --Idx) {
    MachineBasicBlock::iterator Store = *Context.ArgStoreVector[Idx];
    MachineOperand PushOp = Store->getOperand(X86::AddrNumOperands);
    MachineBasicBlock::iterator Push = nullptr;
    unsigned PushOpcode;
    switch (Store->getOpcode()) {
    default:
      llvm_unreachable("Unexpected Opcode!");
    case X86::AND16mi8:
    case X86::AND32mi8:
    case X86::AND64mi8:
    case X86::OR16mi8:
    case X86::OR32mi8:
    case X86::OR64mi8:
    case X86::MOV32mi:
    case X86::MOV64mi32:
      PushOpcode = Is64Bit ? X86::PUSH64i32 : X86::PUSHi32;
      // If the operand is a small (8-bit) immediate, we can use a
      // PUSH instruction with a shorter encoding.
      // Note that isImm() may fail even though this is a MOVmi, because
      // the operand can also be a symbol.
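      // (For example, "pushl $5" encodes as the two bytes 6A 05, whereas a
      // push of a full 32-bit immediate takes five bytes.)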
      if (PushOp.isImm()) {
        int64_t Val = PushOp.getImm();
        if (isInt<8>(Val))
          PushOpcode = Is64Bit ? X86::PUSH64i8 : X86::PUSH32i8;
      }
      Push = BuildMI(MBB, Context.Call, DL, TII->get(PushOpcode)).add(PushOp);
      break;
    case X86::MOV32mr:
    case X86::MOV64mr: {
      unsigned int Reg = PushOp.getReg();

      // If storing a 32-bit vreg on 64-bit targets, extend to a 64-bit vreg
      // in preparation for the PUSH64. The upper 32 bits can be undef.
      if (Is64Bit && Store->getOpcode() == X86::MOV32mr) {
        unsigned UndefReg = MRI->createVirtualRegister(&X86::GR64RegClass);
        Reg = MRI->createVirtualRegister(&X86::GR64RegClass);
        BuildMI(MBB, Context.Call, DL, TII->get(X86::IMPLICIT_DEF), UndefReg);
        BuildMI(MBB, Context.Call, DL, TII->get(X86::INSERT_SUBREG), Reg)
            .addReg(UndefReg)
            .add(PushOp)
            .addImm(X86::sub_32bit);
      }

      // If PUSHrmm is not slow on this target, try to fold the source of the
      // push into the instruction.
      bool SlowPUSHrmm = STI->isAtom() || STI->isSLM();

      // Check that this is legal to fold. Right now, we're extremely
      // conservative about that.
      MachineInstr *DefMov = nullptr;
      if (!SlowPUSHrmm && (DefMov = canFoldIntoRegPush(FrameSetup, Reg))) {
        PushOpcode = Is64Bit ? X86::PUSH64rmm : X86::PUSH32rmm;
        Push = BuildMI(MBB, Context.Call, DL, TII->get(PushOpcode));

        unsigned NumOps = DefMov->getDesc().getNumOperands();
        for (unsigned i = NumOps - X86::AddrNumOperands; i != NumOps; ++i)
          Push->addOperand(DefMov->getOperand(i));

        DefMov->eraseFromParent();
      } else {
        PushOpcode = Is64Bit ? X86::PUSH64r : X86::PUSH32r;
        Push = BuildMI(MBB, Context.Call, DL, TII->get(PushOpcode))
                   .addReg(Reg)
                   .getInstr();
      }
      break;
    }
    }

    // For debugging, when using SP-based CFA, we need to adjust the CFA
    // offset after each push.
    // TODO: This is needed only if we require precise CFA.
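    // (Each BuildCFI call below should end up emitting something along the
    // lines of ".cfi_adjust_cfa_offset 4" (or 8 on 64-bit targets) in the
    // final assembly; mentioned here for orientation only.)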
    if (!TFL->hasFP(MF))
      TFL->BuildCFI(
          MBB, std::next(Push), DL,
          MCCFIInstruction::createAdjustCfaOffset(nullptr, SlotSize));

    MBB.erase(Store);
  }

  // The stack-pointer copy is no longer used in the call sequences.
  // There should not be any other users, but we can't commit to that, so:
  if (Context.SPCopy && MRI->use_empty(Context.SPCopy->getOperand(0).getReg()))
    Context.SPCopy->eraseFromParent();

  // Once we've done this, we need to make sure PEI doesn't assume a reserved
  // frame.
  X86MachineFunctionInfo *FuncInfo = MF.getInfo<X86MachineFunctionInfo>();
  FuncInfo->setHasPushSequences(true);
}

MachineInstr *X86CallFrameOptimization::canFoldIntoRegPush(
    MachineBasicBlock::iterator FrameSetup, unsigned Reg) {
  // Do an extremely restricted form of load folding.
  // ISel will often create patterns like:
  //   movl 4(%edi), %eax
  //   movl 8(%edi), %ecx
  //   movl 12(%edi), %edx
  //   movl %edx, 8(%esp)
  //   movl %ecx, 4(%esp)
  //   movl %eax, (%esp)
  //   call
  // Get rid of those with prejudice.
  if (!TargetRegisterInfo::isVirtualRegister(Reg))
    return nullptr;

  // Make sure this is the only use of Reg.
  if (!MRI->hasOneNonDBGUse(Reg))
    return nullptr;

  MachineInstr &DefMI = *MRI->getVRegDef(Reg);

  // Make sure the def is a MOV from memory.
  // If the def is in another block, give up.
  if ((DefMI.getOpcode() != X86::MOV32rm &&
       DefMI.getOpcode() != X86::MOV64rm) ||
      DefMI.getParent() != FrameSetup->getParent())
    return nullptr;

  // Make sure we don't have any instructions between DefMI and the
  // push that make folding the load illegal.
  for (MachineBasicBlock::iterator I = DefMI; I != FrameSetup; ++I)
    if (I->isLoadFoldBarrier())
      return nullptr;

  return &DefMI;
}

FunctionPass *llvm::createX86CallFrameOptimization() {
  return new X86CallFrameOptimization();
}