//===-- ImplicitNullChecks.cpp - Fold null checks into memory accesses ----===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass turns explicit null checks of the form
//
//   test %r10, %r10
//   je throw_npe
//   movl (%r10), %esi
//   ...
//
// to
//
//   faulting_load_op("movl (%r10), %esi", throw_npe)
//   ...
//
// With the help of a runtime that understands the .fault_maps section,
// faulting_load_op branches to throw_npe if executing movl (%r10), %esi incurs
// a page fault.
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include "llvm/Target/TargetInstrInfo.h"

using namespace llvm;

static cl::opt<int> PageSize("imp-null-check-page-size",
                             cl::desc("The page size of the target in bytes"),
                             cl::init(4096));

static cl::opt<unsigned> MaxInstsToConsider(
    "imp-null-max-insts-to-consider",
    cl::desc("The max number of instructions to consider hoisting loads over "
             "(the algorithm is quadratic over this number)"),
    cl::init(8));

#define DEBUG_TYPE "implicit-null-checks"

STATISTIC(NumImplicitNullChecks,
          "Number of explicit null checks made implicit");

namespace {

class ImplicitNullChecks : public MachineFunctionPass {
  /// Return true if \c computeDependence can process \p MI.
  static bool canHandle(const MachineInstr *MI);

  /// Helper function for \c computeDependence. Return true if \p A
  /// and \p B do not have any dependences between them, and can be
  /// re-ordered without changing program semantics.
  bool canReorder(const MachineInstr *A, const MachineInstr *B);

  /// A data type for representing the result computed by \c
  /// computeDependence. States whether it is okay to reorder the
  /// instruction passed to \c computeDependence with at most one
  /// dependency.
  struct DependenceResult {
    /// Can we actually re-order \p MI with \p Insts (see \c
    /// computeDependence).
    bool CanReorder;

    /// If non-None, then an instruction in \p Insts that also must be
    /// hoisted.
    Optional<ArrayRef<MachineInstr *>::iterator> PotentialDependence;

    /*implicit*/ DependenceResult(
        bool CanReorder,
        Optional<ArrayRef<MachineInstr *>::iterator> PotentialDependence)
        : CanReorder(CanReorder), PotentialDependence(PotentialDependence) {
      assert((!PotentialDependence || CanReorder) &&
             "!CanReorder && PotentialDependence.hasValue() not allowed!");
    }
  };

  /// Compute a result for the following question: can \p MI be
  /// re-ordered from after \p Insts to before it.
  ///
  /// \c canHandle should return true for all instructions in \p
  /// Insts.
  DependenceResult computeDependence(const MachineInstr *MI,
                                     ArrayRef<MachineInstr *> Insts);

  /// Represents one null check that can be made implicit.
  class NullCheck {
    // The memory operation the null check can be folded into.
    MachineInstr *MemOperation;

    // The instruction actually doing the null check (Ptr != 0).
    MachineInstr *CheckOperation;

    // The block the check resides in.
    MachineBasicBlock *CheckBlock;

    // The block branched to if the pointer is non-null.
    MachineBasicBlock *NotNullSucc;

    // The block branched to if the pointer is null.
    MachineBasicBlock *NullSucc;

    // If this is non-null, then MemOperation has a dependency on this
    // instruction; and it needs to be hoisted to execute before MemOperation.
    MachineInstr *OnlyDependency;

  public:
    explicit NullCheck(MachineInstr *memOperation, MachineInstr *checkOperation,
                       MachineBasicBlock *checkBlock,
                       MachineBasicBlock *notNullSucc,
                       MachineBasicBlock *nullSucc,
                       MachineInstr *onlyDependency)
        : MemOperation(memOperation), CheckOperation(checkOperation),
          CheckBlock(checkBlock), NotNullSucc(notNullSucc), NullSucc(nullSucc),
          OnlyDependency(onlyDependency) {}

    MachineInstr *getMemOperation() const { return MemOperation; }

    MachineInstr *getCheckOperation() const { return CheckOperation; }

    MachineBasicBlock *getCheckBlock() const { return CheckBlock; }

    MachineBasicBlock *getNotNullSucc() const { return NotNullSucc; }

    MachineBasicBlock *getNullSucc() const { return NullSucc; }

    MachineInstr *getOnlyDependency() const { return OnlyDependency; }
  };

  const TargetInstrInfo *TII = nullptr;
  const TargetRegisterInfo *TRI = nullptr;
  AliasAnalysis *AA = nullptr;
  MachineModuleInfo *MMI = nullptr;

  bool analyzeBlockForNullChecks(MachineBasicBlock &MBB,
                                 SmallVectorImpl<NullCheck> &NullCheckList);
  MachineInstr *insertFaultingLoad(MachineInstr *LoadMI, MachineBasicBlock *MBB,
                                   MachineBasicBlock *HandlerMBB);
  void rewriteNullChecks(ArrayRef<NullCheck> NullCheckList);

public:
  static char ID;

  ImplicitNullChecks() : MachineFunctionPass(ID) {
    initializeImplicitNullChecksPass(*PassRegistry::getPassRegistry());
  }

  bool runOnMachineFunction(MachineFunction &MF) override;
  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AAResultsWrapperPass>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }
};

}

bool ImplicitNullChecks::canHandle(const MachineInstr *MI) {
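  // Only instructions without side effects can be moved around freely: reject
  // calls, stores, and anything with unmodeled side effects up front, since
  // computeDependence only reasons about register dependencies.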
  if (MI->isCall() || MI->mayStore() || MI->hasUnmodeledSideEffects())
    return false;
  auto IsRegMask = [](const MachineOperand &MO) { return MO.isRegMask(); };
  (void)IsRegMask;

  assert(!llvm::any_of(MI->operands(), IsRegMask) &&
         "Calls were filtered out above!");

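  // Any memory access must also be unordered: volatile accesses and atomics
  // with an ordering stronger than "unordered" must not be moved.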
  auto IsUnordered = [](MachineMemOperand *MMO) { return MMO->isUnordered(); };
  return llvm::all_of(MI->memoperands(), IsUnordered);
}

ImplicitNullChecks::DependenceResult
ImplicitNullChecks::computeDependence(const MachineInstr *MI,
                                      ArrayRef<MachineInstr *> Block) {
  assert(llvm::all_of(Block, canHandle) && "Check this first!");
  assert(!llvm::is_contained(Block, MI) && "Block must be exclusive of MI!");

  Optional<ArrayRef<MachineInstr *>::iterator> Dep;

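  // Scan Block for instructions that MI cannot be reordered past. No such
  // instruction means MI can be hoisted freely; exactly one is recorded as a
  // potential dependence to hoist along with MI; two or more means we give up.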
  for (auto I = Block.begin(), E = Block.end(); I != E; ++I) {
    if (canReorder(*I, MI))
      continue;

    if (Dep == None) {
      // Found one possible dependency, keep track of it.
      Dep = I;
    } else {
      // We found two dependencies, so bail out.
      return {false, None};
    }
  }

  return {true, Dep};
}

bool ImplicitNullChecks::canReorder(const MachineInstr *A,
                                    const MachineInstr *B) {
  assert(canHandle(A) && canHandle(B) && "Precondition!");

  // canHandle makes sure that we _can_ correctly analyze the dependencies
  // between A and B here -- for instance, we should not be dealing with heap
  // load-store dependencies here.

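  // Conservatively treat any overlap between registers referenced by A and B
  // as a dependence, regardless of whether the operands are defs or uses.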
  for (auto MOA : A->operands()) {
    if (!(MOA.isReg() && MOA.getReg()))
      continue;

    unsigned RegA = MOA.getReg();
    for (auto MOB : B->operands()) {
      if (!(MOB.isReg() && MOB.getReg()))
        continue;

      unsigned RegB = MOB.getReg();

      if (TRI->regsOverlap(RegA, RegB))
        return false;
    }
  }

  return true;
}

bool ImplicitNullChecks::runOnMachineFunction(MachineFunction &MF) {
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getRegInfo().getTargetRegisterInfo();
  MMI = &MF.getMMI();
  AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();

  SmallVector<NullCheck, 16> NullCheckList;

  for (auto &MBB : MF)
    analyzeBlockForNullChecks(MBB, NullCheckList);

  if (!NullCheckList.empty())
    rewriteNullChecks(NullCheckList);

  return !NullCheckList.empty();
}

// Return true if any register aliasing \p Reg is live-in to \p MBB.
static bool AnyAliasLiveIn(const TargetRegisterInfo *TRI,
                           MachineBasicBlock *MBB, unsigned Reg) {
  for (MCRegAliasIterator AR(Reg, TRI, /*IncludeSelf*/ true); AR.isValid();
       ++AR)
    if (MBB->isLiveIn(*AR))
      return true;
  return false;
}

/// Analyze MBB to check if its terminating branch can be turned into an
/// implicit null check. If yes, append a description of the said null check to
/// NullCheckList and return true, else return false.
bool ImplicitNullChecks::analyzeBlockForNullChecks(
    MachineBasicBlock &MBB, SmallVectorImpl<NullCheck> &NullCheckList) {
  typedef TargetInstrInfo::MachineBranchPredicate MachineBranchPredicate;

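  // Only branches annotated with !make.implicit metadata at the IR level are
  // candidates; this keeps the transformation opt-in.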
  MDNode *BranchMD = nullptr;
  if (auto *BB = MBB.getBasicBlock())
    BranchMD = BB->getTerminator()->getMetadata(LLVMContext::MD_make_implicit);

  if (!BranchMD)
    return false;

  MachineBranchPredicate MBP;

  if (TII->analyzeBranchPredicate(MBB, MBP, true))
    return false;

  // Is the predicate comparing an integer to zero?
  if (!(MBP.LHS.isReg() && MBP.RHS.isImm() && MBP.RHS.getImm() == 0 &&
        (MBP.Predicate == MachineBranchPredicate::PRED_NE ||
         MBP.Predicate == MachineBranchPredicate::PRED_EQ)))
    return false;

  // If we cannot erase the test instruction itself, then making the null check
  // implicit does not buy us much.
  if (!MBP.SingleUseCondition)
    return false;

  MachineBasicBlock *NotNullSucc, *NullSucc;

  if (MBP.Predicate == MachineBranchPredicate::PRED_NE) {
    NotNullSucc = MBP.TrueDest;
    NullSucc = MBP.FalseDest;
  } else {
    NotNullSucc = MBP.FalseDest;
    NullSucc = MBP.TrueDest;
  }

  // We handle the simplest case for now. We can potentially do better by using
  // the machine dominator tree.
  if (NotNullSucc->pred_size() != 1)
    return false;

  // Starting with a code fragment like:
  //
  //   test %RAX, %RAX
  //   jne LblNotNull
  //
  // LblNull:
  //   callq throw_NullPointerException
  //
  // LblNotNull:
  //   Inst0
  //   Inst1
  //   ...
  //   Def = Load (%RAX + <offset>)
  //   ...
  //
  //
  // we want to end up with
  //
  //   Def = FaultingLoad (%RAX + <offset>), LblNull
  //   jmp LblNotNull ;; explicit or fallthrough
  //
  // LblNotNull:
  //   Inst0
  //   Inst1
  //   ...
  //
  // LblNull:
  //   callq throw_NullPointerException
  //
  //
  // To see why this is legal, consider the two possibilities:
  //
  //  1. %RAX is null: since we constrain <offset> to be less than PageSize, the
  //     load instruction dereferences the null page, causing a segmentation
  //     fault.
  //
  //  2. %RAX is not null: in this case we know that the load cannot fault, as
  //     otherwise the load would've faulted in the original program too and the
  //     original program would've been undefined.
  //
  // This reasoning cannot be extended to justify hoisting through arbitrary
  // control flow. For instance, in the example below (in pseudo-C)
  //
  //    if (ptr == null) { throw_npe(); unreachable; }
  //    if (some_cond) { return 42; }
  //    v = ptr->field;  // LD
  //    ...
  //
  // we cannot (without code duplication) use the load marked "LD" to null check
  // ptr -- clause (2) above does not apply in this case. In the above program
  // the safety of ptr->field can be dependent on some_cond; and, for instance,
  // ptr could be some non-null invalid reference that never gets loaded from
  // because some_cond is always true.

  const unsigned PointerReg = MBP.LHS.getReg();

  SmallVector<MachineInstr *, 8> InstsSeenSoFar;

  // Is \p MI a memory operation that can be used to null check the value in \p
  // PointerReg?
  auto IsSuitableMemoryOp = [&](MachineInstr &MI,
                                ArrayRef<MachineInstr *> PrevInsts) {
    int64_t Offset;
    unsigned BaseReg;

    if (!TII->getMemOpBaseRegImmOfs(MI, BaseReg, Offset, TRI) ||
        BaseReg != PointerReg)
      return false;

    // We want the load to be issued at a sane offset from PointerReg, so that
    // if PointerReg is null then the load reliably page faults.
    if (!(MI.mayLoad() && !MI.isPredicable() && Offset < PageSize))
      return false;

    // Finally, we need to make sure that the load instruction actually is
    // loading from PointerReg, and there isn't some re-definition of PointerReg
    // between the compare and the load.
    for (auto *PrevMI : PrevInsts)
      for (auto &PrevMO : PrevMI->operands())
        if (PrevMO.isReg() && PrevMO.getReg() &&
            TRI->regsOverlap(PrevMO.getReg(), PointerReg))
          return false;

    return true;
  };

  // Return true if \p FaultingMI can be hoisted from after the instructions in
  // \p InstsSeenSoFar to before them. Set \p Dependence to a non-null value if
  // we also need to (and legally can) hoist a dependency.
  auto CanHoistLoadInst = [&](MachineInstr *FaultingMI,
                              ArrayRef<MachineInstr *> InstsSeenSoFar,
                              MachineInstr *&Dependence) {
    auto DepResult = computeDependence(FaultingMI, InstsSeenSoFar);
    if (!DepResult.CanReorder)
      return false;

    if (!DepResult.PotentialDependence) {
      Dependence = nullptr;
      return true;
    }

    auto DependenceItr = *DepResult.PotentialDependence;
    auto *DependenceMI = *DependenceItr;

    // We don't want to reason about speculating loads. Note -- at this point
    // we should have already filtered out all of the other non-speculatable
    // things, like calls and stores.
    assert(canHandle(DependenceMI) && "Should never have reached here!");
    if (DependenceMI->mayLoad())
      return false;

    for (auto &DependenceMO : DependenceMI->operands()) {
      if (!(DependenceMO.isReg() && DependenceMO.getReg()))
        continue;

      // Make sure that we won't clobber any live-ins to the sibling block by
      // hoisting Dependency. For instance, we can't hoist INST to before the
      // null check (even if it is safe, and does not violate any dependencies
      // in the non_null_block) if %rdx is live in to _null_block.
      //
      //    test %rcx, %rcx
      //    je _null_block
      //  _non_null_block:
      //    %rdx<def> = INST
      //    ...
      //
      // This restriction does not apply to the faulting load inst because if
      // the pointer loaded from is in the null page, the load will not
      // semantically execute and will not affect machine state. That is, if
      // the load was loading into %rax and it faults, the value of %rax should
      // stay the same as it would have been had the load not executed and we'd
      // have branched to NullSucc directly.
      if (AnyAliasLiveIn(TRI, NullSucc, DependenceMO.getReg()))
        return false;

      // The Dependency can't be re-defining the base register -- otherwise we
      // won't get the memory operation on the address we want. This is already
      // checked in \c IsSuitableMemoryOp.
      assert(!TRI->regsOverlap(DependenceMO.getReg(), PointerReg) &&
             "Should have been checked before!");
    }

    auto DepDepResult = computeDependence(
        DependenceMI, {InstsSeenSoFar.begin(), DependenceItr});

    if (!DepDepResult.CanReorder || DepDepResult.PotentialDependence)
      return false;

    Dependence = DependenceMI;
    return true;
  };

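  // Walk NotNullSucc looking for the first load off PointerReg that can be
  // folded into the null check, recording the instructions we would have to
  // hoist it over in InstsSeenSoFar.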
  for (auto &MI : *NotNullSucc) {
    if (!canHandle(&MI) || InstsSeenSoFar.size() >= MaxInstsToConsider)
      return false;

    MachineInstr *Dependence;
    if (IsSuitableMemoryOp(MI, InstsSeenSoFar) &&
        CanHoistLoadInst(&MI, InstsSeenSoFar, Dependence)) {
      NullCheckList.emplace_back(&MI, MBP.ConditionDef, &MBB, NotNullSucc,
                                 NullSucc, Dependence);
      return true;
    }

    InstsSeenSoFar.push_back(&MI);
  }

  return false;
}

/// Wrap a machine load instruction, LoadMI, into a FAULTING_LOAD_OP machine
/// instruction. The FAULTING_LOAD_OP instruction does the same load as LoadMI
/// (defining the same register), and branches to HandlerMBB if the load
/// faults. The FAULTING_LOAD_OP instruction is inserted at the end of MBB.
MachineInstr *
ImplicitNullChecks::insertFaultingLoad(MachineInstr *LoadMI,
                                       MachineBasicBlock *MBB,
                                       MachineBasicBlock *HandlerMBB) {
  const unsigned NoRegister = 0; // Guaranteed to be the NoRegister value for
                                 // all targets.

  DebugLoc DL;
  unsigned NumDefs = LoadMI->getDesc().getNumDefs();
  assert(NumDefs <= 1 && "other cases unhandled!");

  unsigned DefReg = NoRegister;
  if (NumDefs != 0) {
    DefReg = LoadMI->defs().begin()->getReg();
    assert(std::distance(LoadMI->defs().begin(), LoadMI->defs().end()) == 1 &&
           "expected exactly one def!");
  }

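  // FAULTING_LOAD_OP takes the handler block and the original load's opcode as
  // explicit operands, followed by the original load's use operands and memory
  // operands.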
  auto MIB = BuildMI(MBB, DL, TII->get(TargetOpcode::FAULTING_LOAD_OP), DefReg)
                 .addMBB(HandlerMBB)
                 .addImm(LoadMI->getOpcode());

  for (auto &MO : LoadMI->uses())
    MIB.addOperand(MO);

  MIB.setMemRefs(LoadMI->memoperands_begin(), LoadMI->memoperands_end());

  return MIB;
}

/// Rewrite the null checks in NullCheckList into implicit null checks.
void ImplicitNullChecks::rewriteNullChecks(
    ArrayRef<ImplicitNullChecks::NullCheck> NullCheckList) {
  DebugLoc DL;

  for (auto &NC : NullCheckList) {
    // Remove the conditional branch dependent on the null check.
    unsigned BranchesRemoved = TII->removeBranch(*NC.getCheckBlock());
    (void)BranchesRemoved;
    assert(BranchesRemoved > 0 && "expected at least one branch!");

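    // If the load has a single dependency that also needs hoisting, move it
    // into the check block ahead of the faulting load we are about to insert.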
    if (auto *DepMI = NC.getOnlyDependency()) {
      DepMI->removeFromParent();
      NC.getCheckBlock()->insert(NC.getCheckBlock()->end(), DepMI);
    }

    // Insert a faulting load where the conditional branch was originally. The
    // check we performed earlier ensures that this bit of code motion is
    // legal. We do not touch the successors list for any basic block since we
    // haven't changed control flow, we've just made it implicit.
    MachineInstr *FaultingLoad = insertFaultingLoad(
        NC.getMemOperation(), NC.getCheckBlock(), NC.getNullSucc());
    // Now the values defined by MemOperation, if any, are live-in to the block
    // of MemOperation. The original load operation may define implicit-defs
    // alongside the loaded value.
    MachineBasicBlock *MBB = NC.getMemOperation()->getParent();
    for (const MachineOperand &MO : FaultingLoad->operands()) {
      if (!MO.isReg() || !MO.isDef())
        continue;
      unsigned Reg = MO.getReg();
      if (!Reg || MBB->isLiveIn(Reg))
        continue;
      MBB->addLiveIn(Reg);
    }

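    // Registers defined by the hoisted dependency are no longer defined inside
    // NotNullSucc, so they must now be live-in to it.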
    if (auto *DepMI = NC.getOnlyDependency()) {
      for (auto &MO : DepMI->operands()) {
        if (!MO.isReg() || !MO.getReg() || !MO.isDef())
          continue;
        if (!NC.getNotNullSucc()->isLiveIn(MO.getReg()))
          NC.getNotNullSucc()->addLiveIn(MO.getReg());
      }
    }

    NC.getMemOperation()->eraseFromParent();
    NC.getCheckOperation()->eraseFromParent();

    // Insert an *unconditional* branch to the not-null successor.
    TII->insertBranch(*NC.getCheckBlock(), NC.getNotNullSucc(), nullptr,
                      /*Cond=*/None, DL);

    NumImplicitNullChecks++;
  }
}


char ImplicitNullChecks::ID = 0;
char &llvm::ImplicitNullChecksID = ImplicitNullChecks::ID;
INITIALIZE_PASS_BEGIN(ImplicitNullChecks, "implicit-null-checks",
                      "Implicit null checks", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_END(ImplicitNullChecks, "implicit-null-checks",
                    "Implicit null checks", false, false)