blob: e59cd8ec705992497621f87662ec92ea7e37bbf6 [file] [log] [blame]
Tim Northover3d7a0572019-05-24 08:39:43 +00001//===-- SwiftErrorValueTracking.cpp --------------------------------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This implements a limited mem2reg-like analysis to promote uses of function
// arguments and allocas marked with swifterror from memory into virtual
11// registers tracked by this class.
12//
13//===----------------------------------------------------------------------===//
14
15#include "llvm/CodeGen/SwiftErrorValueTracking.h"
16#include "llvm/ADT/SmallSet.h"
17#include "llvm/CodeGen/MachineRegisterInfo.h"
18#include "llvm/CodeGen/MachineInstrBuilder.h"
19#include "llvm/CodeGen/TargetInstrInfo.h"
20#include "llvm/CodeGen/TargetLowering.h"
21#include "llvm/IR/Value.h"
22
23using namespace llvm;
24
25unsigned SwiftErrorValueTracking::getOrCreateVReg(const MachineBasicBlock *MBB,
26 const Value *Val) {
27 auto Key = std::make_pair(MBB, Val);
28 auto It = VRegDefMap.find(Key);
29 // If this is the first use of this swifterror value in this basic block,
30 // create a new virtual register.
31 // After we processed all basic blocks we will satisfy this "upwards exposed
32 // use" by inserting a copy or phi at the beginning of this block.
33 if (It == VRegDefMap.end()) {
34 auto &DL = MF->getDataLayout();
35 const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
36 auto VReg = MF->getRegInfo().createVirtualRegister(RC);
37 VRegDefMap[Key] = VReg;
38 VRegUpwardsUse[Key] = VReg;
39 return VReg;
40 } else
41 return It->second;
42}
43
44void SwiftErrorValueTracking::setCurrentVReg(const MachineBasicBlock *MBB,
Matt Arsenaulte3a676e2019-06-24 15:50:29 +000045 const Value *Val, Register VReg) {
Tim Northover3d7a0572019-05-24 08:39:43 +000046 VRegDefMap[std::make_pair(MBB, Val)] = VReg;
47}
48
49unsigned SwiftErrorValueTracking::getOrCreateVRegDefAt(
50 const Instruction *I, const MachineBasicBlock *MBB, const Value *Val) {
51 auto Key = PointerIntPair<const Instruction *, 1, bool>(I, true);
52 auto It = VRegDefUses.find(Key);
53 if (It != VRegDefUses.end())
54 return It->second;
55
56 auto &DL = MF->getDataLayout();
57 const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
58 unsigned VReg = MF->getRegInfo().createVirtualRegister(RC);
59 VRegDefUses[Key] = VReg;
60 setCurrentVReg(MBB, Val, VReg);
61 return VReg;
62}
63
64unsigned SwiftErrorValueTracking::getOrCreateVRegUseAt(
65 const Instruction *I, const MachineBasicBlock *MBB, const Value *Val) {
66 auto Key = PointerIntPair<const Instruction *, 1, bool>(I, false);
67 auto It = VRegDefUses.find(Key);
68 if (It != VRegDefUses.end())
69 return It->second;
70
71 unsigned VReg = getOrCreateVReg(MBB, Val);
72 VRegDefUses[Key] = VReg;
73 return VReg;
74}
75
76/// Set up SwiftErrorVals by going through the function. If the function has
77/// swifterror argument, it will be the first entry.
78void SwiftErrorValueTracking::setFunction(MachineFunction &mf) {
79 MF = &mf;
80 Fn = &MF->getFunction();
81 TLI = MF->getSubtarget().getTargetLowering();
82 TII = MF->getSubtarget().getInstrInfo();
83
84 if (!TLI->supportSwiftError())
85 return;
86
87 SwiftErrorVals.clear();
88 VRegDefMap.clear();
89 VRegUpwardsUse.clear();
90 VRegDefUses.clear();
91 SwiftErrorArg = nullptr;
92
93 // Check if function has a swifterror argument.
94 bool HaveSeenSwiftErrorArg = false;
95 for (Function::const_arg_iterator AI = Fn->arg_begin(), AE = Fn->arg_end();
96 AI != AE; ++AI)
97 if (AI->hasSwiftErrorAttr()) {
98 assert(!HaveSeenSwiftErrorArg &&
99 "Must have only one swifterror parameter");
100 (void)HaveSeenSwiftErrorArg; // silence warning.
101 HaveSeenSwiftErrorArg = true;
102 SwiftErrorArg = &*AI;
103 SwiftErrorVals.push_back(&*AI);
104 }
105
106 for (const auto &LLVMBB : *Fn)
107 for (const auto &Inst : LLVMBB) {
108 if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(&Inst))
109 if (Alloca->isSwiftError())
110 SwiftErrorVals.push_back(Alloca);
111 }
112}
113
114bool SwiftErrorValueTracking::createEntriesInEntryBlock(DebugLoc DbgLoc) {
115 if (!TLI->supportSwiftError())
116 return false;
117
118 // We only need to do this when we have swifterror parameter or swifterror
119 // alloc.
120 if (SwiftErrorVals.empty())
121 return false;
122
123 MachineBasicBlock *MBB = &*MF->begin();
124 auto &DL = MF->getDataLayout();
125 auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
126 bool Inserted = false;
127 for (const auto *SwiftErrorVal : SwiftErrorVals) {
128 // We will always generate a copy from the argument. It is always used at
129 // least by the 'return' of the swifterror.
130 if (SwiftErrorArg && SwiftErrorArg == SwiftErrorVal)
131 continue;
132 unsigned VReg = MF->getRegInfo().createVirtualRegister(RC);
133 // Assign Undef to Vreg. We construct MI directly to make sure it works
134 // with FastISel.
135 BuildMI(*MBB, MBB->getFirstNonPHI(), DbgLoc,
136 TII->get(TargetOpcode::IMPLICIT_DEF), VReg);
137
138 setCurrentVReg(MBB, SwiftErrorVal, VReg);
139 Inserted = true;
140 }
141
142 return Inserted;
143}
144
/// Propagate swifterror values through the machine function CFG.
///
/// Visits blocks in reverse post-order so that (loops aside) a block's
/// predecessors already carry a downward-exposed vreg when the block itself
/// is processed.  Each upwards-exposed use recorded by getOrCreateVReg is
/// satisfied here with either a COPY or a PHI at the start of its block.
void SwiftErrorValueTracking::propagateVRegs() {
  if (!TLI->supportSwiftError())
    return;

  // We only need to do this when we have swifterror parameter or swifterror
  // alloc.
  if (SwiftErrorVals.empty())
    return;

  // For each machine basic block in reverse post order.
  ReversePostOrderTraversal<MachineFunction *> RPOT(MF);
  for (MachineBasicBlock *MBB : RPOT) {
    // For each swifterror value in the function.
    for (const auto *SwiftErrorVal : SwiftErrorVals) {
      auto Key = std::make_pair(MBB, SwiftErrorVal);
      auto UUseIt = VRegUpwardsUse.find(Key);
      auto VRegDefIt = VRegDefMap.find(Key);
      bool UpwardsUse = UUseIt != VRegUpwardsUse.end();
      Register UUseVReg = UpwardsUse ? UUseIt->second : Register();
      bool DownwardDef = VRegDefIt != VRegDefMap.end();
      // getOrCreateVReg always records a def whenever it records an upwards
      // use, so a use without a def would mean the maps are out of sync.
      assert(!(UpwardsUse && !DownwardDef) &&
             "We can't have an upwards use but no downwards def");

      // If there is no upwards exposed use and an entry for the swifterror in
      // the def map for this value we don't need to do anything: We already
      // have a downward def for this basic block.
      if (!UpwardsUse && DownwardDef)
        continue;

      // Otherwise we either have an upwards exposed use vreg that we need to
      // materialize or need to forward the downward def from predecessors.

      // Check whether we have a single vreg def from all predecessors.
      // Otherwise we need a phi.
      SmallVector<std::pair<MachineBasicBlock *, unsigned>, 4> VRegs;
      SmallSet<const MachineBasicBlock *, 8> Visited;
      for (auto *Pred : MBB->predecessors()) {
        if (!Visited.insert(Pred).second)
          continue;
        VRegs.push_back(std::make_pair(
            Pred, getOrCreateVReg(Pred, SwiftErrorVal)));
        if (Pred != MBB)
          continue;
        // We have a self-edge.
        // If there was no upwards use in this basic block there is now one: the
        // phi needs to use it self.
        if (!UpwardsUse) {
          UpwardsUse = true;
          // The getOrCreateVReg call above just registered an upwards use for
          // this (self-predecessor) block; re-query the map to pick it up.
          UUseIt = VRegUpwardsUse.find(Key);
          assert(UUseIt != VRegUpwardsUse.end());
          UUseVReg = UUseIt->second;
        }
      }

      // We need a phi node if we have more than one predecessor with different
      // downward defs.
      bool needPHI =
          VRegs.size() >= 1 &&
          std::find_if(
              VRegs.begin(), VRegs.end(),
              [&](const std::pair<const MachineBasicBlock *, unsigned> &V)
                  -> bool { return V.second != VRegs[0].second; }) !=
              VRegs.end();

      // If there is no upwards exposed used and we don't need a phi just
      // forward the swifterror vreg from the predecessor(s).
      if (!UpwardsUse && !needPHI) {
        assert(!VRegs.empty() &&
               "No predecessors? The entry block should bail out earlier");
        // Just forward the swifterror vreg from the predecessor(s).
        setCurrentVReg(MBB, SwiftErrorVal, VRegs[0].second);
        continue;
      }

      // Attribute the inserted COPY/PHI to the alloca's debug location when
      // the swifterror value is an alloca; the argument gets no location.
      auto DLoc = isa<Instruction>(SwiftErrorVal)
                      ? cast<Instruction>(SwiftErrorVal)->getDebugLoc()
                      : DebugLoc();
      const auto *TII = MF->getSubtarget().getInstrInfo();

      // If we don't need a phi create a copy to the upward exposed vreg.
      if (!needPHI) {
        assert(UpwardsUse);
        assert(!VRegs.empty() &&
               "No predecessors? Is the Calling Convention correct?");
        unsigned DestReg = UUseVReg;
        BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc, TII->get(TargetOpcode::COPY),
                DestReg)
            .addReg(VRegs[0].second);
        continue;
      }

      // We need a phi: if there is an upwards exposed use we already have a
      // destination virtual register number otherwise we generate a new one.
      auto &DL = MF->getDataLayout();
      auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
      Register PHIVReg =
          UpwardsUse ? UUseVReg : MF->getRegInfo().createVirtualRegister(RC);
      MachineInstrBuilder PHI =
          BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc,
                  TII->get(TargetOpcode::PHI), PHIVReg);
      for (auto BBRegPair : VRegs) {
        PHI.addReg(BBRegPair.second).addMBB(BBRegPair.first);
      }

      // We did not have a definition in this block before: store the phi's vreg
      // as this block downward exposed def.
      if (!UpwardsUse)
        setCurrentVReg(MBB, SwiftErrorVal, PHIVReg);
    }
  }
}
257
258void SwiftErrorValueTracking::preassignVRegs(
259 MachineBasicBlock *MBB, BasicBlock::const_iterator Begin,
260 BasicBlock::const_iterator End) {
261 if (!TLI->supportSwiftError() || SwiftErrorVals.empty())
262 return;
263
264 // Iterator over instructions and assign vregs to swifterror defs and uses.
265 for (auto It = Begin; It != End; ++It) {
266 ImmutableCallSite CS(&*It);
267 if (CS) {
268 // A call-site with a swifterror argument is both use and def.
269 const Value *SwiftErrorAddr = nullptr;
270 for (auto &Arg : CS.args()) {
271 if (!Arg->isSwiftError())
272 continue;
273 // Use of swifterror.
274 assert(!SwiftErrorAddr && "Cannot have multiple swifterror arguments");
275 SwiftErrorAddr = &*Arg;
276 assert(SwiftErrorAddr->isSwiftError() &&
277 "Must have a swifterror value argument");
278 getOrCreateVRegUseAt(&*It, MBB, SwiftErrorAddr);
279 }
280 if (!SwiftErrorAddr)
281 continue;
282
283 // Def of swifterror.
284 getOrCreateVRegDefAt(&*It, MBB, SwiftErrorAddr);
285
286 // A load is a use.
287 } else if (const LoadInst *LI = dyn_cast<const LoadInst>(&*It)) {
288 const Value *V = LI->getOperand(0);
289 if (!V->isSwiftError())
290 continue;
291
292 getOrCreateVRegUseAt(LI, MBB, V);
293
294 // A store is a def.
295 } else if (const StoreInst *SI = dyn_cast<const StoreInst>(&*It)) {
296 const Value *SwiftErrorAddr = SI->getOperand(1);
297 if (!SwiftErrorAddr->isSwiftError())
298 continue;
299
300 // Def of swifterror.
301 getOrCreateVRegDefAt(&*It, MBB, SwiftErrorAddr);
302
303 // A return in a swiferror returning function is a use.
304 } else if (const ReturnInst *R = dyn_cast<const ReturnInst>(&*It)) {
305 const Function *F = R->getParent()->getParent();
306 if (!F->getAttributes().hasAttrSomewhere(Attribute::SwiftError))
307 continue;
308
309 getOrCreateVRegUseAt(R, MBB, SwiftErrorArg);
310 }
311 }
312}