//===------- X86ExpandPseudo.cpp - Expand pseudo instructions -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains a pass that expands pseudo instructions into target
// instructions to allow proper scheduling, if-conversion, other late
// optimizations, or simply the encoding of the instructions.
//
//===----------------------------------------------------------------------===//

#include "X86.h"
#include "X86FrameLowering.h"
#include "X86InstrBuilder.h"
#include "X86InstrInfo.h"
#include "X86MachineFunctionInfo.h"
#include "X86Subtarget.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/Passes.h" // For IDs of passes that are preserved.
#include "llvm/IR/GlobalValue.h"
using namespace llvm;

#define DEBUG_TYPE "x86-pseudo"

namespace {
class X86ExpandPseudo : public MachineFunctionPass {
public:
  static char ID;
  X86ExpandPseudo() : MachineFunctionPass(ID) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addPreservedID(MachineLoopInfoID);
    AU.addPreservedID(MachineDominatorsID);
    MachineFunctionPass::getAnalysisUsage(AU);
  }

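  // Target-specific helpers, cached per function in runOnMachineFunction().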
  const X86Subtarget *STI;
  const X86InstrInfo *TII;
  const X86RegisterInfo *TRI;
  const X86FrameLowering *X86FL;

  bool runOnMachineFunction(MachineFunction &Fn) override;

  const char *getPassName() const override {
    return "X86 pseudo instruction expansion pass";
  }

private:
  bool ExpandMI(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI);
  bool ExpandMBB(MachineBasicBlock &MBB);
};
char X86ExpandPseudo::ID = 0;
} // End anonymous namespace.

/// If \p MBBI is a pseudo instruction, this method expands
/// it to the corresponding (sequence of) actual instruction(s).
/// \returns true if \p MBBI has been expanded.
bool X86ExpandPseudo::ExpandMI(MachineBasicBlock &MBB,
                               MachineBasicBlock::iterator MBBI) {
  MachineInstr &MI = *MBBI;
  unsigned Opcode = MI.getOpcode();
  DebugLoc DL = MBBI->getDebugLoc();
  switch (Opcode) {
  default:
    return false;
  case X86::TCRETURNdi:
  case X86::TCRETURNri:
  case X86::TCRETURNmi:
  case X86::TCRETURNdi64:
  case X86::TCRETURNri64:
  case X86::TCRETURNmi64: {
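    // For the register and immediate forms the jump target is operand 0 and
    // the callee stack adjustment is operand 1; the memory forms encode the
    // target as a full x86 memory reference in operands 0-4, so there the
    // adjustment is operand 5.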
    bool isMem = Opcode == X86::TCRETURNmi || Opcode == X86::TCRETURNmi64;
    MachineOperand &JumpTarget = MBBI->getOperand(0);
    MachineOperand &StackAdjust = MBBI->getOperand(isMem ? 5 : 1);
    assert(StackAdjust.isImm() && "Expecting immediate value.");

    // Adjust stack pointer.
    int StackAdj = StackAdjust.getImm();

    if (StackAdj) {
      // Check for possible merge with preceding ADD instruction.
      StackAdj += X86FL->mergeSPUpdates(MBB, MBBI, true);
      X86FL->emitSPUpdate(MBB, MBBI, StackAdj, /*InEpilogue=*/true);
    }

    // Jump to label or value in register.
    bool IsWin64 = STI->isTargetWin64();
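    // Win64 wants jumps that leave the function to carry a REX.W prefix,
    // which is why the *_REX tail-jump opcodes are selected below.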
    if (Opcode == X86::TCRETURNdi || Opcode == X86::TCRETURNdi64) {
      unsigned Op = (Opcode == X86::TCRETURNdi)
                        ? X86::TAILJMPd
                        : (IsWin64 ? X86::TAILJMPd64_REX : X86::TAILJMPd64);
      MachineInstrBuilder MIB = BuildMI(MBB, MBBI, DL, TII->get(Op));
      if (JumpTarget.isGlobal())
        MIB.addGlobalAddress(JumpTarget.getGlobal(), JumpTarget.getOffset(),
                             JumpTarget.getTargetFlags());
      else {
        assert(JumpTarget.isSymbol());
        MIB.addExternalSymbol(JumpTarget.getSymbolName(),
                              JumpTarget.getTargetFlags());
      }
    } else if (Opcode == X86::TCRETURNmi || Opcode == X86::TCRETURNmi64) {
      unsigned Op = (Opcode == X86::TCRETURNmi)
                        ? X86::TAILJMPm
                        : (IsWin64 ? X86::TAILJMPm64_REX : X86::TAILJMPm64);
      MachineInstrBuilder MIB = BuildMI(MBB, MBBI, DL, TII->get(Op));
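      // Copy the five operands that make up the x86 memory reference
      // (base, scale, index, displacement, segment) onto the tail jump.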
      for (unsigned i = 0; i != 5; ++i)
        MIB.addOperand(MBBI->getOperand(i));
    } else if (Opcode == X86::TCRETURNri64) {
      BuildMI(MBB, MBBI, DL,
              TII->get(IsWin64 ? X86::TAILJMPr64_REX : X86::TAILJMPr64))
          .addReg(JumpTarget.getReg(), RegState::Kill);
    } else {
      BuildMI(MBB, MBBI, DL, TII->get(X86::TAILJMPr))
          .addReg(JumpTarget.getReg(), RegState::Kill);
    }

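    // The tail jump we just emitted sits immediately before the pseudo;
    // transfer the pseudo's implicit register operands to it so register
    // liveness stays correct.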
    MachineInstr *NewMI = std::prev(MBBI);
    NewMI->copyImplicitOps(*MBBI->getParent()->getParent(), *MBBI);

    // Delete the pseudo instruction TCRETURN.
    MBB.erase(MBBI);

    return true;
  }
  case X86::EH_RETURN:
  case X86::EH_RETURN64: {
    MachineOperand &DestAddr = MBBI->getOperand(0);
    assert(DestAddr.isReg() && "Offset should be in register!");
    const bool Uses64BitFramePtr =
        STI->isTarget64BitLP64() || STI->isTargetNaCl64();
    unsigned StackPtr = TRI->getStackRegister();
    BuildMI(MBB, MBBI, DL,
            TII->get(Uses64BitFramePtr ? X86::MOV64rr : X86::MOV32rr), StackPtr)
        .addReg(DestAddr.getReg());
    // The EH_RETURN pseudo is really removed during the MC Lowering.
    return true;
  }
  case X86::IRET: {
    // Adjust stack to erase error code
    int64_t StackAdj = MBBI->getOperand(0).getImm();
    X86FL->emitSPUpdate(MBB, MBBI, StackAdj, true);
    // Replace pseudo with machine iret
    BuildMI(MBB, MBBI, DL,
            TII->get(STI->is64Bit() ? X86::IRET64 : X86::IRET32));
    MBB.erase(MBBI);
    return true;
  }
  case X86::RET: {
    // Adjust stack to erase error code
    int64_t StackAdj = MBBI->getOperand(0).getImm();
    MachineInstrBuilder MIB;
    if (StackAdj == 0) {
      MIB = BuildMI(MBB, MBBI, DL,
                    TII->get(STI->is64Bit() ? X86::RETQ : X86::RETL));
    } else if (isUInt<16>(StackAdj)) {
      MIB = BuildMI(MBB, MBBI, DL,
                    TII->get(STI->is64Bit() ? X86::RETIQ : X86::RETIL))
                .addImm(StackAdj);
    } else {
      assert(!STI->is64Bit() &&
             "shouldn't need to do this for x86_64 targets!");
      // A ret can only handle immediates as big as 2**16-1. If we need to pop
      // off bytes before the return address, we must do it manually.
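      // Pop the return address into a scratch register, release the stack
      // space, then push the return address back so a plain RETL can be used.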
      BuildMI(MBB, MBBI, DL, TII->get(X86::POP32r))
          .addReg(X86::ECX, RegState::Define);
      X86FL->emitSPUpdate(MBB, MBBI, StackAdj, /*InEpilogue=*/true);
      BuildMI(MBB, MBBI, DL, TII->get(X86::PUSH32r)).addReg(X86::ECX);
      MIB = BuildMI(MBB, MBBI, DL, TII->get(X86::RETL));
    }
    for (unsigned I = 1, E = MBBI->getNumOperands(); I != E; ++I)
      MIB.addOperand(MBBI->getOperand(I));
    MBB.erase(MBBI);
    return true;
  }
  case X86::EH_RESTORE: {
    // Restore ESP and EBP, and optionally ESI if required.
    bool IsSEH = isAsynchronousEHPersonality(classifyEHPersonality(
        MBB.getParent()->getFunction()->getPersonalityFn()));
    X86FL->restoreWin32EHStackPointers(MBB, MBBI, DL, /*RestoreSP=*/IsSEH);
    MBBI->eraseFromParent();
    return true;
  }
  }
  llvm_unreachable("Previous switch has a fallthrough?");
}

/// Expand all pseudo instructions contained in \p MBB.
/// \returns true if any expansion occurred for \p MBB.
bool X86ExpandPseudo::ExpandMBB(MachineBasicBlock &MBB) {
  bool Modified = false;

  // MBBI may be invalidated by the expansion.
  MachineBasicBlock::iterator MBBI = MBB.begin(), E = MBB.end();
  while (MBBI != E) {
    MachineBasicBlock::iterator NMBBI = std::next(MBBI);
    Modified |= ExpandMI(MBB, MBBI);
    MBBI = NMBBI;
  }

  return Modified;
}

bool X86ExpandPseudo::runOnMachineFunction(MachineFunction &MF) {
  STI = &static_cast<const X86Subtarget &>(MF.getSubtarget());
  TII = STI->getInstrInfo();
  TRI = STI->getRegisterInfo();
  X86FL = STI->getFrameLowering();

  bool Modified = false;
  for (MachineBasicBlock &MBB : MF)
    Modified |= ExpandMBB(MBB);
  return Modified;
}

/// Returns an instance of the pseudo instruction expansion pass.
FunctionPass *llvm::createX86ExpandPseudoPass() {
  return new X86ExpandPseudo();
}