//===- CodeMetrics.cpp - Code cost measurements ---------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements code cost measurement utilities.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/DataLayout.h"
#include "llvm/Function.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Support/CallSite.h"

using namespace llvm;

/// callIsSmall - If a call is likely to lower to a single target instruction,
/// or is otherwise deemed small, return true.
/// TODO: Perhaps calls like memcpy, strcpy, etc?
bool llvm::callIsSmall(ImmutableCallSite CS) {
  if (isa<IntrinsicInst>(CS.getInstruction()))
    return true;

  const Function *F = CS.getCalledFunction();
  if (!F) return false;

  if (F->hasLocalLinkage()) return false;

  if (!F->hasName()) return false;

  StringRef Name = F->getName();

  // These will all likely lower to a single selection DAG node.
  if (Name == "copysign" || Name == "copysignf" || Name == "copysignl" ||
      Name == "fabs" || Name == "fabsf" || Name == "fabsl" ||
      Name == "sin" || Name == "sinf" || Name == "sinl" ||
      Name == "cos" || Name == "cosf" || Name == "cosl" ||
      Name == "sqrt" || Name == "sqrtf" || Name == "sqrtl")
    return true;

  // These are all likely to be optimized into something smaller.
  if (Name == "pow" || Name == "powf" || Name == "powl" ||
      Name == "exp2" || Name == "exp2l" || Name == "exp2f" ||
      Name == "floor" || Name == "floorf" || Name == "ceil" ||
      Name == "round" || Name == "ffs" || Name == "ffsl" ||
      Name == "abs" || Name == "labs" || Name == "llabs")
    return true;

  return false;
}

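// For example, with the rules above a direct call to the C library's sqrt or
// fabsf is treated as small, while an indirect call, a call to a function with
// local linkage, or a call to an unnamed function is not.
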
bool llvm::isInstructionFree(const Instruction *I, const DataLayout *TD) {
  if (isa<PHINode>(I))
    return true;

  // If a GEP has all constant indices, it will probably be folded with
  // a load/store.
  if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(I))
    return GEP->hasAllConstantIndices();

  if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default:
      return false;
    case Intrinsic::dbg_declare:
    case Intrinsic::dbg_value:
    case Intrinsic::invariant_start:
    case Intrinsic::invariant_end:
    case Intrinsic::lifetime_start:
    case Intrinsic::lifetime_end:
    case Intrinsic::objectsize:
    case Intrinsic::ptr_annotation:
    case Intrinsic::var_annotation:
      // These intrinsics don't count as size.
      return true;
    }
  }

  if (const CastInst *CI = dyn_cast<CastInst>(I)) {
    // Noop casts, including ptr <-> int, don't count.
    if (CI->isLosslessCast())
      return true;

    Value *Op = CI->getOperand(0);
    // An inttoptr cast is free so long as the input is a legal integer type
    // which doesn't contain values outside the range of a pointer.
    if (isa<IntToPtrInst>(CI) && TD &&
        TD->isLegalInteger(Op->getType()->getScalarSizeInBits()) &&
        Op->getType()->getScalarSizeInBits() <= TD->getPointerSizeInBits())
      return true;

    // A ptrtoint cast is free so long as the result is large enough to store
    // the pointer, and a legal integer type.
    if (isa<PtrToIntInst>(CI) && TD &&
        TD->isLegalInteger(Op->getType()->getScalarSizeInBits()) &&
        Op->getType()->getScalarSizeInBits() >= TD->getPointerSizeInBits())
      return true;

    // trunc to a native type is free (assuming the target has compare and
    // shift-right of the same width).
    if (TD && isa<TruncInst>(CI) &&
        TD->isLegalInteger(TD->getTypeSizeInBits(CI->getType())))
      return true;
    // The result of a cmp instruction is often extended (to be used by other
    // cmp instructions, logical operations, or return instructions). These
    // extensions are usually a no-op on most targets.
    if (isa<CmpInst>(CI->getOperand(0)))
      return true;
  }

  return false;
}

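// For example, under the GEP rule above,
//   %a = getelementptr inbounds [4 x i32]* %buf, i64 0, i64 2
// is considered free because all of its indices are constant, while
//   %b = getelementptr inbounds [4 x i32]* %buf, i64 0, i64 %i
// is not.
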
/// analyzeBasicBlock - Fill in the current structure with information gleaned
/// from the specified block.
void CodeMetrics::analyzeBasicBlock(const BasicBlock *BB,
                                    const DataLayout *TD) {
  ++NumBlocks;
  unsigned NumInstsBeforeThisBB = NumInsts;
  for (BasicBlock::const_iterator II = BB->begin(), E = BB->end();
       II != E; ++II) {
    if (isInstructionFree(II, TD))
      continue;

    // Special handling for calls.
    if (isa<CallInst>(II) || isa<InvokeInst>(II)) {
      ImmutableCallSite CS(cast<Instruction>(II));

      if (const Function *F = CS.getCalledFunction()) {
        // If a function is both internal and has a single use, then it is
        // extremely likely to get inlined in the future (it was probably
        // exposed by an interleaved devirtualization pass).
        if (!CS.isNoInline() && F->hasInternalLinkage() && F->hasOneUse())
          ++NumInlineCandidates;

        // If this call is to the function itself, then the function is
        // recursive. Inlining it into other functions is a bad idea, because
        // this is basically just a form of loop peeling, and our metrics
        // aren't useful for that case.
        if (F == BB->getParent())
          isRecursive = true;
      }

      if (!callIsSmall(CS)) {
        // Each argument to a call takes on average one instruction to set up.
        NumInsts += CS.arg_size();

        // We don't want inline asm to count as a call - that would prevent
        // loop unrolling. The argument setup cost is still real, though.
        if (!isa<InlineAsm>(CS.getCalledValue()))
          ++NumCalls;
      }
    }

    if (const AllocaInst *AI = dyn_cast<AllocaInst>(II)) {
      if (!AI->isStaticAlloca())
        this->usesDynamicAlloca = true;
    }

    if (isa<ExtractElementInst>(II) || II->getType()->isVectorTy())
      ++NumVectorInsts;

    if (const CallInst *CI = dyn_cast<CallInst>(II))
      if (CI->hasFnAttr(Attribute::NoDuplicate))
        notDuplicatable = true;

    if (const InvokeInst *InvI = dyn_cast<InvokeInst>(II))
      if (InvI->hasFnAttr(Attribute::NoDuplicate))
        notDuplicatable = true;

    ++NumInsts;
  }

  if (isa<ReturnInst>(BB->getTerminator()))
    ++NumRets;

  // We never want to inline functions that contain an indirectbr. Doing so
  // would be incorrect because all the blockaddresses (in static global
  // initializers, for example) would still refer to the original function, so
  // the indirect jump would branch from the inlined copy of the function back
  // into the original function, which is undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions with
  // indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function. And as a QOI issue,
  // if someone is using a blockaddress without an indirectbr, and that
  // reference somehow ends up in another function or global, we probably don't
  // want to inline this function.
  notDuplicatable |= isa<IndirectBrInst>(BB->getTerminator());

  // Remember NumInsts for this BB.
  NumBBInsts[BB] = NumInsts - NumInstsBeforeThisBB;
}

void CodeMetrics::analyzeFunction(Function *F, const DataLayout *TD) {
  // If this function contains a call that "returns twice" (e.g., setjmp or
  // _setjmp) and it isn't marked with "returns twice" itself, never inline it.
  // This is a hack because we depend on the user marking their local variables
  // as volatile if they are live across a setjmp call, and they probably
  // won't do this in callers.
  exposesReturnsTwice = F->callsFunctionThatReturnsTwice() &&
    !F->getFnAttributes().hasAttribute(Attribute::ReturnsTwice);

  // Look at the size of the callee.
  for (Function::const_iterator BB = F->begin(), E = F->end(); BB != E; ++BB)
    analyzeBasicBlock(&*BB, TD);
}
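
// A minimal usage sketch for a client of this analysis (the surrounding pass
// boilerplate is omitted, and the cost weighting below is hypothetical rather
// than anything this file defines):
//
//   CodeMetrics Metrics;
//   Metrics.analyzeFunction(F, TD);   // F is a Function*; TD may be null.
//   if (Metrics.notDuplicatable || Metrics.usesDynamicAlloca ||
//       Metrics.isRecursive)
//     return;                         // Decline to duplicate or inline F.
//   unsigned Cost = Metrics.NumInsts + 5 * Metrics.NumCalls;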
211}