//===- InstCombineCalls.cpp -----------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the visitCall and visitInvoke functions.
//
//===----------------------------------------------------------------------===//

#include "InstCombine.h"
#include "llvm/Support/CallSite.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Transforms/Utils/BuildLibCalls.h"
#include "llvm/Transforms/Utils/Local.h"
using namespace llvm;

/// getPromotedType - Return the specified type promoted as it would be to pass
/// through a va_arg area.
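/// For example, an i8 or i16 argument is widened to i32, mirroring the usual
/// integer promotions applied to values passed through "...".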
static Type *getPromotedType(Type *Ty) {
  if (IntegerType* ITy = dyn_cast<IntegerType>(Ty)) {
    if (ITy->getBitWidth() < 32)
      return Type::getInt32Ty(Ty->getContext());
  }
  return Ty;
}


Instruction *InstCombiner::SimplifyMemTransfer(MemIntrinsic *MI) {
  unsigned DstAlign = getKnownAlignment(MI->getArgOperand(0), TD);
  unsigned SrcAlign = getKnownAlignment(MI->getArgOperand(1), TD);
  unsigned MinAlign = std::min(DstAlign, SrcAlign);
  unsigned CopyAlign = MI->getAlignment();

  if (CopyAlign < MinAlign) {
    MI->setAlignment(ConstantInt::get(MI->getAlignmentType(),
                                      MinAlign, false));
    return MI;
  }

  // If MemCpyInst length is 1/2/4/8 bytes then replace memcpy with
  // load/store.
  ConstantInt *MemOpLength = dyn_cast<ConstantInt>(MI->getArgOperand(2));
  if (MemOpLength == 0) return 0;

  // Source and destination pointer types are always "i8*" for intrinsic. See
  // if the size is something we can handle with a single primitive load/store.
  // A single load+store correctly handles overlapping memory in the memmove
  // case.
  unsigned Size = MemOpLength->getZExtValue();
  if (Size == 0) return MI;  // Delete this mem transfer.

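  // (Size & (Size-1)) == 0 is the usual power-of-two test, so combined with
  // the Size > 8 check this accepts exactly the sizes 1, 2, 4 and 8.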
  if (Size > 8 || (Size&(Size-1)))
    return 0;  // If not 1/2/4/8 bytes, exit.

  // Use an integer load+store unless we can find something better.
  unsigned SrcAddrSp =
    cast<PointerType>(MI->getArgOperand(1)->getType())->getAddressSpace();
  unsigned DstAddrSp =
    cast<PointerType>(MI->getArgOperand(0)->getType())->getAddressSpace();

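  // Size is a byte count, so Size << 3 is the matching bit width (8, 16, 32
  // or 64 bits).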
  IntegerType* IntType = IntegerType::get(MI->getContext(), Size<<3);
  Type *NewSrcPtrTy = PointerType::get(IntType, SrcAddrSp);
  Type *NewDstPtrTy = PointerType::get(IntType, DstAddrSp);

  // Memcpy forces the use of i8* for the source and destination. That means
  // that if you're using memcpy to move one double around, you'll get a cast
  // from double* to i8*. We'd much rather use a double load+store than an i64
  // load+store here, because that improves the odds that the source or dest
  // address will be promotable. See if we can find a better type than the
  // integer datatype.
  Value *StrippedDest = MI->getArgOperand(0)->stripPointerCasts();
  if (StrippedDest != MI->getArgOperand(0)) {
    Type *SrcETy = cast<PointerType>(StrippedDest->getType())
                                    ->getElementType();
    if (TD && SrcETy->isSized() && TD->getTypeStoreSize(SrcETy) == Size) {
      // The SrcETy might be something like {{{double}}} or [1 x double]. Rip
      // down through these levels if so.
      while (!SrcETy->isSingleValueType()) {
        if (StructType *STy = dyn_cast<StructType>(SrcETy)) {
          if (STy->getNumElements() == 1)
            SrcETy = STy->getElementType(0);
          else
            break;
        } else if (ArrayType *ATy = dyn_cast<ArrayType>(SrcETy)) {
          if (ATy->getNumElements() == 1)
            SrcETy = ATy->getElementType();
          else
            break;
        } else
          break;
      }

      if (SrcETy->isSingleValueType()) {
        NewSrcPtrTy = PointerType::get(SrcETy, SrcAddrSp);
        NewDstPtrTy = PointerType::get(SrcETy, DstAddrSp);
      }
    }
  }

  // If the memcpy/memmove provides better alignment info than we can
  // infer, use it.
  SrcAlign = std::max(SrcAlign, CopyAlign);
  DstAlign = std::max(DstAlign, CopyAlign);

  Value *Src = Builder->CreateBitCast(MI->getArgOperand(1), NewSrcPtrTy);
  Value *Dest = Builder->CreateBitCast(MI->getArgOperand(0), NewDstPtrTy);
  LoadInst *L = Builder->CreateLoad(Src, MI->isVolatile());
  L->setAlignment(SrcAlign);
  StoreInst *S = Builder->CreateStore(L, Dest, MI->isVolatile());
  S->setAlignment(DstAlign);

  // Set the size of the copy to 0, it will be deleted on the next iteration.
  MI->setArgOperand(2, Constant::getNullValue(MemOpLength->getType()));
  return MI;
}

Instruction *InstCombiner::SimplifyMemSet(MemSetInst *MI) {
  unsigned Alignment = getKnownAlignment(MI->getDest(), TD);
  if (MI->getAlignment() < Alignment) {
    MI->setAlignment(ConstantInt::get(MI->getAlignmentType(),
                                      Alignment, false));
    return MI;
  }

  // Extract the length and alignment and fill if they are constant.
  ConstantInt *LenC = dyn_cast<ConstantInt>(MI->getLength());
  ConstantInt *FillC = dyn_cast<ConstantInt>(MI->getValue());
  if (!LenC || !FillC || !FillC->getType()->isIntegerTy(8))
    return 0;
  uint64_t Len = LenC->getZExtValue();
  Alignment = MI->getAlignment();

  // If the length is zero, this is a no-op.
  if (Len == 0) return MI;  // memset(d,c,0,a) -> noop

  // memset(s,c,n) -> store s, c (for n=1,2,4,8)
  if (Len <= 8 && isPowerOf2_32((uint32_t)Len)) {
    Type *ITy = IntegerType::get(MI->getContext(), Len*8);  // n=1 -> i8.

    Value *Dest = MI->getDest();
    unsigned DstAddrSp = cast<PointerType>(Dest->getType())->getAddressSpace();
    Type *NewDstPtrTy = PointerType::get(ITy, DstAddrSp);
    Dest = Builder->CreateBitCast(Dest, NewDstPtrTy);

    // Alignment 0 is identity for alignment 1 for memset, but not store.
    if (Alignment == 0) Alignment = 1;

    // Extract the fill value and store.
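    // The multiply splats the fill byte across a 64-bit pattern (e.g. 0xAB
    // becomes 0xABABABABABABABAB); ConstantInt::get truncates it to ITy's
    // width, so only the low Len bytes are actually stored.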
    uint64_t Fill = FillC->getZExtValue()*0x0101010101010101ULL;
    StoreInst *S = Builder->CreateStore(ConstantInt::get(ITy, Fill), Dest,
                                        MI->isVolatile());
    S->setAlignment(Alignment);

    // Set the size of the copy to 0, it will be deleted on the next iteration.
    MI->setLength(Constant::getNullValue(LenC->getType()));
    return MI;
  }

  return 0;
}

/// visitCallInst - CallInst simplification. This mostly only handles folding
/// of intrinsic instructions. For normal calls, it allows visitCallSite to do
/// the heavy lifting.
///
Instruction *InstCombiner::visitCallInst(CallInst &CI) {
  if (isFreeCall(&CI))
    return visitFree(CI);
  if (extractMallocCall(&CI) || extractCallocCall(&CI))
    return visitMalloc(CI);

  // If the caller function is nounwind, mark the call as nounwind, even if the
  // callee isn't.
  if (CI.getParent()->getParent()->doesNotThrow() &&
      !CI.doesNotThrow()) {
    CI.setDoesNotThrow();
    return &CI;
  }

  IntrinsicInst *II = dyn_cast<IntrinsicInst>(&CI);
  if (!II) return visitCallSite(&CI);

  // Intrinsics cannot occur in an invoke, so handle them here instead of in
  // visitCallSite.
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(II)) {
    bool Changed = false;

    // memmove/cpy/set of zero bytes is a noop.
    if (Constant *NumBytes = dyn_cast<Constant>(MI->getLength())) {
      if (NumBytes->isNullValue())
        return EraseInstFromFunction(CI);

      if (ConstantInt *CI = dyn_cast<ConstantInt>(NumBytes))
        if (CI->getZExtValue() == 1) {
          // Replace the instruction with just byte operations. We would
          // transform other cases to loads/stores, but we don't know if
          // alignment is sufficient.
        }
    }

    // No other transformations apply to volatile transfers.
    if (MI->isVolatile())
      return 0;

    // If we have a memmove and the source operation is a constant global,
    // then the source and dest pointers can't alias, so we can change this
    // into a call to memcpy.
    if (MemMoveInst *MMI = dyn_cast<MemMoveInst>(MI)) {
      if (GlobalVariable *GVSrc = dyn_cast<GlobalVariable>(MMI->getSource()))
        if (GVSrc->isConstant()) {
          Module *M = CI.getParent()->getParent()->getParent();
          Intrinsic::ID MemCpyID = Intrinsic::memcpy;
          Type *Tys[3] = { CI.getArgOperand(0)->getType(),
                           CI.getArgOperand(1)->getType(),
                           CI.getArgOperand(2)->getType() };
          CI.setCalledFunction(Intrinsic::getDeclaration(M, MemCpyID, Tys));
          Changed = true;
        }
    }

    if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI)) {
      // memmove(x,x,size) -> noop.
      if (MTI->getSource() == MTI->getDest())
        return EraseInstFromFunction(CI);
    }

    // If we can determine a pointer alignment that is bigger than currently
    // set, update the alignment.
    if (isa<MemTransferInst>(MI)) {
      if (Instruction *I = SimplifyMemTransfer(MI))
        return I;
    } else if (MemSetInst *MSI = dyn_cast<MemSetInst>(MI)) {
      if (Instruction *I = SimplifyMemSet(MSI))
        return I;
    }

    if (Changed) return II;
  }

  switch (II->getIntrinsicID()) {
  default: break;
  case Intrinsic::objectsize: {
    // We need target data for just about everything so depend on it.
    if (!TD) return 0;

    Type *ReturnTy = CI.getType();
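    // The second argument selects what to return when the size is unknown:
    // 'true' requests the minimum, so we use 0; 'false' requests the maximum,
    // so we use -1.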
    uint64_t DontKnow = II->getArgOperand(1) == Builder->getTrue() ? 0 : -1ULL;

    // Get to the real allocated thing and offset as fast as possible.
    Value *Op1 = II->getArgOperand(0)->stripPointerCasts();

    uint64_t Offset = 0;
    uint64_t Size = -1ULL;

    // Try to look through constant GEPs.
    if (GEPOperator *GEP = dyn_cast<GEPOperator>(Op1)) {
      if (!GEP->hasAllConstantIndices()) return 0;

      // Get the current byte offset into the thing. Use the original
      // operand in case we're looking through a bitcast.
      SmallVector<Value*, 8> Ops(GEP->idx_begin(), GEP->idx_end());
      if (!GEP->getPointerOperandType()->isPointerTy())
        return 0;
      Offset = TD->getIndexedOffset(GEP->getPointerOperandType(), Ops);

      Op1 = GEP->getPointerOperand()->stripPointerCasts();

      // Make sure we're not a constant offset from an external
      // global.
      if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Op1))
        if (!GV->hasDefinitiveInitializer()) return 0;
    }

    // If we've stripped down to a single global variable that we
    // can know the size of then just return that.
    if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Op1)) {
      if (GV->hasDefinitiveInitializer()) {
        Constant *C = GV->getInitializer();
        Size = TD->getTypeAllocSize(C->getType());
      } else {
        // Can't determine size of the GV.
        Constant *RetVal = ConstantInt::get(ReturnTy, DontKnow);
        return ReplaceInstUsesWith(CI, RetVal);
      }
    } else if (AllocaInst *AI = dyn_cast<AllocaInst>(Op1)) {
      // Get alloca size.
      if (AI->getAllocatedType()->isSized()) {
        Size = TD->getTypeAllocSize(AI->getAllocatedType());
        if (AI->isArrayAllocation()) {
          const ConstantInt *C = dyn_cast<ConstantInt>(AI->getArraySize());
          if (!C) return 0;
          Size *= C->getZExtValue();
        }
      }
    } else if (CallInst *MI = extractMallocCall(Op1)) {
      // Get allocation size.
      Value *Arg = MI->getArgOperand(0);
      if (ConstantInt *CI = dyn_cast<ConstantInt>(Arg))
        Size = CI->getZExtValue();

    } else if (CallInst *MI = extractCallocCall(Op1)) {
      // Get allocation size.
      Value *Arg1 = MI->getArgOperand(0);
      Value *Arg2 = MI->getArgOperand(1);
      if (ConstantInt *CI1 = dyn_cast<ConstantInt>(Arg1))
        if (ConstantInt *CI2 = dyn_cast<ConstantInt>(Arg2))
          Size = (CI1->getValue() * CI2->getValue()).getZExtValue();
    }

    // Do not return "I don't know" here. Later optimization passes could
    // make it possible to evaluate objectsize to a constant.
    if (Size == -1ULL)
      return 0;

    if (Size < Offset) {
      // Out of bound reference? Negative index normalized to large
      // index? Just return "I don't know".
      return ReplaceInstUsesWith(CI, ConstantInt::get(ReturnTy, DontKnow));
    }
    return ReplaceInstUsesWith(CI, ConstantInt::get(ReturnTy, Size-Offset));
  }
  case Intrinsic::bswap:
    // bswap(bswap(x)) -> x
    if (IntrinsicInst *Operand = dyn_cast<IntrinsicInst>(II->getArgOperand(0)))
      if (Operand->getIntrinsicID() == Intrinsic::bswap)
        return ReplaceInstUsesWith(CI, Operand->getArgOperand(0));

    // bswap(trunc(bswap(x))) -> trunc(lshr(x, c))
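    // For example, with an i32 x truncated to i16, C is 16 and the result is
    // the top two bytes of x, which is exactly what the bswap(trunc(bswap(x)))
    // sequence produces.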
    if (TruncInst *TI = dyn_cast<TruncInst>(II->getArgOperand(0))) {
      if (IntrinsicInst *Operand = dyn_cast<IntrinsicInst>(TI->getOperand(0)))
        if (Operand->getIntrinsicID() == Intrinsic::bswap) {
          unsigned C = Operand->getType()->getPrimitiveSizeInBits() -
                       TI->getType()->getPrimitiveSizeInBits();
          Value *CV = ConstantInt::get(Operand->getType(), C);
          Value *V = Builder->CreateLShr(Operand->getArgOperand(0), CV);
          return new TruncInst(V, TI->getType());
        }
    }

    break;
  case Intrinsic::powi:
    if (ConstantInt *Power = dyn_cast<ConstantInt>(II->getArgOperand(1))) {
      // powi(x, 0) -> 1.0
      if (Power->isZero())
        return ReplaceInstUsesWith(CI, ConstantFP::get(CI.getType(), 1.0));
      // powi(x, 1) -> x
      if (Power->isOne())
        return ReplaceInstUsesWith(CI, II->getArgOperand(0));
      // powi(x, -1) -> 1/x
      if (Power->isAllOnesValue())
        return BinaryOperator::CreateFDiv(ConstantFP::get(CI.getType(), 1.0),
                                          II->getArgOperand(0));
    }
    break;
  case Intrinsic::cttz: {
    // If all bits below the first known one are known zero,
    // this value is constant.
    IntegerType *IT = dyn_cast<IntegerType>(II->getArgOperand(0)->getType());
    // FIXME: Try to simplify vectors of integers.
    if (!IT) break;
    uint32_t BitWidth = IT->getBitWidth();
    APInt KnownZero(BitWidth, 0);
    APInt KnownOne(BitWidth, 0);
    ComputeMaskedBits(II->getArgOperand(0), KnownZero, KnownOne);
    unsigned TrailingZeros = KnownOne.countTrailingZeros();
    APInt Mask(APInt::getLowBitsSet(BitWidth, TrailingZeros));
    if ((Mask & KnownZero) == Mask)
      return ReplaceInstUsesWith(CI, ConstantInt::get(IT,
                                 APInt(BitWidth, TrailingZeros)));

  }
  break;
  case Intrinsic::ctlz: {
    // If all bits above the first known one are known zero,
    // this value is constant.
    IntegerType *IT = dyn_cast<IntegerType>(II->getArgOperand(0)->getType());
    // FIXME: Try to simplify vectors of integers.
    if (!IT) break;
    uint32_t BitWidth = IT->getBitWidth();
    APInt KnownZero(BitWidth, 0);
    APInt KnownOne(BitWidth, 0);
    ComputeMaskedBits(II->getArgOperand(0), KnownZero, KnownOne);
    unsigned LeadingZeros = KnownOne.countLeadingZeros();
    APInt Mask(APInt::getHighBitsSet(BitWidth, LeadingZeros));
    if ((Mask & KnownZero) == Mask)
      return ReplaceInstUsesWith(CI, ConstantInt::get(IT,
                                 APInt(BitWidth, LeadingZeros)));

  }
  break;
  case Intrinsic::uadd_with_overflow: {
    Value *LHS = II->getArgOperand(0), *RHS = II->getArgOperand(1);
    IntegerType *IT = cast<IntegerType>(II->getArgOperand(0)->getType());
    uint32_t BitWidth = IT->getBitWidth();
    APInt LHSKnownZero(BitWidth, 0);
    APInt LHSKnownOne(BitWidth, 0);
    ComputeMaskedBits(LHS, LHSKnownZero, LHSKnownOne);
    bool LHSKnownNegative = LHSKnownOne[BitWidth - 1];
    bool LHSKnownPositive = LHSKnownZero[BitWidth - 1];

    if (LHSKnownNegative || LHSKnownPositive) {
      APInt RHSKnownZero(BitWidth, 0);
      APInt RHSKnownOne(BitWidth, 0);
      ComputeMaskedBits(RHS, RHSKnownZero, RHSKnownOne);
      bool RHSKnownNegative = RHSKnownOne[BitWidth - 1];
      bool RHSKnownPositive = RHSKnownZero[BitWidth - 1];
      if (LHSKnownNegative && RHSKnownNegative) {
        // The sign bit is set in both cases: this MUST overflow.
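        // Both operands are at least 2^(BitWidth-1), so their sum is at least
        // 2^BitWidth and the unsigned add always wraps.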
        // Create a simple add instruction, and insert it into the struct.
        Value *Add = Builder->CreateAdd(LHS, RHS);
        Add->takeName(&CI);
        Constant *V[] = {
          UndefValue::get(LHS->getType()),
          ConstantInt::getTrue(II->getContext())
        };
        StructType *ST = cast<StructType>(II->getType());
        Constant *Struct = ConstantStruct::get(ST, V);
        return InsertValueInst::Create(Struct, Add, 0);
      }

      if (LHSKnownPositive && RHSKnownPositive) {
        // The sign bit is clear in both cases: this CANNOT overflow.
        // Create a simple add instruction, and insert it into the struct.
        Value *Add = Builder->CreateNUWAdd(LHS, RHS);
        Add->takeName(&CI);
        Constant *V[] = {
          UndefValue::get(LHS->getType()),
          ConstantInt::getFalse(II->getContext())
        };
        StructType *ST = cast<StructType>(II->getType());
        Constant *Struct = ConstantStruct::get(ST, V);
        return InsertValueInst::Create(Struct, Add, 0);
      }
    }
  }
  // FALL THROUGH uadd into sadd
  case Intrinsic::sadd_with_overflow:
    // Canonicalize constants into the RHS.
    if (isa<Constant>(II->getArgOperand(0)) &&
        !isa<Constant>(II->getArgOperand(1))) {
      Value *LHS = II->getArgOperand(0);
      II->setArgOperand(0, II->getArgOperand(1));
      II->setArgOperand(1, LHS);
      return II;
    }

    // X + undef -> undef
    if (isa<UndefValue>(II->getArgOperand(1)))
      return ReplaceInstUsesWith(CI, UndefValue::get(II->getType()));

    if (ConstantInt *RHS = dyn_cast<ConstantInt>(II->getArgOperand(1))) {
      // X + 0 -> {X, false}
      if (RHS->isZero()) {
        Constant *V[] = {
          UndefValue::get(II->getArgOperand(0)->getType()),
          ConstantInt::getFalse(II->getContext())
        };
        Constant *Struct =
          ConstantStruct::get(cast<StructType>(II->getType()), V);
        return InsertValueInst::Create(Struct, II->getArgOperand(0), 0);
      }
    }
    break;
  case Intrinsic::usub_with_overflow:
  case Intrinsic::ssub_with_overflow:
    // undef - X -> undef
    // X - undef -> undef
    if (isa<UndefValue>(II->getArgOperand(0)) ||
        isa<UndefValue>(II->getArgOperand(1)))
      return ReplaceInstUsesWith(CI, UndefValue::get(II->getType()));

    if (ConstantInt *RHS = dyn_cast<ConstantInt>(II->getArgOperand(1))) {
      // X - 0 -> {X, false}
      if (RHS->isZero()) {
        Constant *V[] = {
          UndefValue::get(II->getArgOperand(0)->getType()),
          ConstantInt::getFalse(II->getContext())
        };
        Constant *Struct =
          ConstantStruct::get(cast<StructType>(II->getType()), V);
        return InsertValueInst::Create(Struct, II->getArgOperand(0), 0);
      }
    }
    break;
  case Intrinsic::umul_with_overflow: {
    Value *LHS = II->getArgOperand(0), *RHS = II->getArgOperand(1);
    unsigned BitWidth = cast<IntegerType>(LHS->getType())->getBitWidth();

    APInt LHSKnownZero(BitWidth, 0);
    APInt LHSKnownOne(BitWidth, 0);
    ComputeMaskedBits(LHS, LHSKnownZero, LHSKnownOne);
    APInt RHSKnownZero(BitWidth, 0);
    APInt RHSKnownOne(BitWidth, 0);
    ComputeMaskedBits(RHS, RHSKnownZero, RHSKnownOne);

    // Get the largest possible values for each operand.
    APInt LHSMax = ~LHSKnownZero;
    APInt RHSMax = ~RHSKnownZero;
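    // A bit can only be set in the value if it is not known to be zero, so
    // ~KnownZero is an upper bound for each operand.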

    // If multiplying the maximum values does not overflow then we can turn
    // this into a plain NUW mul.
    bool Overflow;
    LHSMax.umul_ov(RHSMax, Overflow);
    if (!Overflow) {
      Value *Mul = Builder->CreateNUWMul(LHS, RHS, "umul_with_overflow");
      Constant *V[] = {
        UndefValue::get(LHS->getType()),
        Builder->getFalse()
      };
      Constant *Struct = ConstantStruct::get(cast<StructType>(II->getType()),V);
      return InsertValueInst::Create(Struct, Mul, 0);
    }
  } // FALL THROUGH
  case Intrinsic::smul_with_overflow:
    // Canonicalize constants into the RHS.
    if (isa<Constant>(II->getArgOperand(0)) &&
        !isa<Constant>(II->getArgOperand(1))) {
      Value *LHS = II->getArgOperand(0);
      II->setArgOperand(0, II->getArgOperand(1));
      II->setArgOperand(1, LHS);
      return II;
    }

    // X * undef -> undef
    if (isa<UndefValue>(II->getArgOperand(1)))
      return ReplaceInstUsesWith(CI, UndefValue::get(II->getType()));

    if (ConstantInt *RHSI = dyn_cast<ConstantInt>(II->getArgOperand(1))) {
      // X*0 -> {0, false}
      if (RHSI->isZero())
        return ReplaceInstUsesWith(CI, Constant::getNullValue(II->getType()));

      // X * 1 -> {X, false}
      if (RHSI->equalsInt(1)) {
        Constant *V[] = {
          UndefValue::get(II->getArgOperand(0)->getType()),
          ConstantInt::getFalse(II->getContext())
        };
        Constant *Struct =
          ConstantStruct::get(cast<StructType>(II->getType()), V);
        return InsertValueInst::Create(Struct, II->getArgOperand(0), 0);
      }
    }
    break;
  case Intrinsic::ppc_altivec_lvx:
  case Intrinsic::ppc_altivec_lvxl:
    // Turn PPC lvx -> load if the pointer is known aligned.
    if (getOrEnforceKnownAlignment(II->getArgOperand(0), 16, TD) >= 16) {
      Value *Ptr = Builder->CreateBitCast(II->getArgOperand(0),
                                         PointerType::getUnqual(II->getType()));
      return new LoadInst(Ptr);
    }
    break;
  case Intrinsic::ppc_altivec_stvx:
  case Intrinsic::ppc_altivec_stvxl:
    // Turn stvx -> store if the pointer is known aligned.
    if (getOrEnforceKnownAlignment(II->getArgOperand(1), 16, TD) >= 16) {
      Type *OpPtrTy =
        PointerType::getUnqual(II->getArgOperand(0)->getType());
      Value *Ptr = Builder->CreateBitCast(II->getArgOperand(1), OpPtrTy);
      return new StoreInst(II->getArgOperand(0), Ptr);
    }
    break;
  case Intrinsic::x86_sse_storeu_ps:
  case Intrinsic::x86_sse2_storeu_pd:
  case Intrinsic::x86_sse2_storeu_dq:
    // Turn X86 storeu -> store if the pointer is known aligned.
    if (getOrEnforceKnownAlignment(II->getArgOperand(0), 16, TD) >= 16) {
      Type *OpPtrTy =
        PointerType::getUnqual(II->getArgOperand(1)->getType());
      Value *Ptr = Builder->CreateBitCast(II->getArgOperand(0), OpPtrTy);
      return new StoreInst(II->getArgOperand(1), Ptr);
    }
    break;

  case Intrinsic::x86_sse_cvtss2si:
  case Intrinsic::x86_sse_cvtss2si64:
  case Intrinsic::x86_sse_cvttss2si:
  case Intrinsic::x86_sse_cvttss2si64:
  case Intrinsic::x86_sse2_cvtsd2si:
  case Intrinsic::x86_sse2_cvtsd2si64:
  case Intrinsic::x86_sse2_cvttsd2si:
  case Intrinsic::x86_sse2_cvttsd2si64: {
    // These intrinsics only demand the 0th element of their input vectors. If
    // we can simplify the input based on that, do so now.
    unsigned VWidth =
      cast<VectorType>(II->getArgOperand(0)->getType())->getNumElements();
    APInt DemandedElts(VWidth, 1);
    APInt UndefElts(VWidth, 0);
    if (Value *V = SimplifyDemandedVectorElts(II->getArgOperand(0),
                                              DemandedElts, UndefElts)) {
      II->setArgOperand(0, V);
      return II;
    }
    break;
  }

  case Intrinsic::x86_sse41_pmovsxbw:
  case Intrinsic::x86_sse41_pmovsxwd:
  case Intrinsic::x86_sse41_pmovsxdq:
  case Intrinsic::x86_sse41_pmovzxbw:
  case Intrinsic::x86_sse41_pmovzxwd:
  case Intrinsic::x86_sse41_pmovzxdq: {
    // pmov{s|z}x ignores the upper half of their input vectors.
    unsigned VWidth =
      cast<VectorType>(II->getArgOperand(0)->getType())->getNumElements();
    unsigned LowHalfElts = VWidth / 2;
    APInt InputDemandedElts(APInt::getBitsSet(VWidth, 0, LowHalfElts));
    APInt UndefElts(VWidth, 0);
    if (Value *TmpV = SimplifyDemandedVectorElts(II->getArgOperand(0),
                                                 InputDemandedElts,
                                                 UndefElts)) {
      II->setArgOperand(0, TmpV);
      return II;
    }
    break;
  }

  case Intrinsic::ppc_altivec_vperm:
    // Turn vperm(V1,V2,mask) -> shuffle(V1,V2,mask) if mask is a constant.
    if (Constant *Mask = dyn_cast<Constant>(II->getArgOperand(2))) {
      assert(Mask->getType()->getVectorNumElements() == 16 &&
             "Bad type for intrinsic!");

      // Check that all of the elements are integer constants or undefs.
      bool AllEltsOk = true;
      for (unsigned i = 0; i != 16; ++i) {
        Constant *Elt = Mask->getAggregateElement(i);
        if (Elt == 0 ||
            !(isa<ConstantInt>(Elt) || isa<UndefValue>(Elt))) {
          AllEltsOk = false;
          break;
        }
      }

      if (AllEltsOk) {
        // Cast the input vectors to byte vectors.
        Value *Op0 = Builder->CreateBitCast(II->getArgOperand(0),
                                            Mask->getType());
        Value *Op1 = Builder->CreateBitCast(II->getArgOperand(1),
                                            Mask->getType());
        Value *Result = UndefValue::get(Op0->getType());

        // Only extract each element once.
        Value *ExtractedElts[32];
        memset(ExtractedElts, 0, sizeof(ExtractedElts));

        for (unsigned i = 0; i != 16; ++i) {
          if (isa<UndefValue>(Mask->getAggregateElement(i)))
            continue;
          unsigned Idx =
            cast<ConstantInt>(Mask->getAggregateElement(i))->getZExtValue();
          Idx &= 31;  // Match the hardware behavior.
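          // Indices 0-15 select bytes from the first input vector and 16-31
          // from the second; Idx & 15 below gives the lane within that vector.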

          if (ExtractedElts[Idx] == 0) {
            ExtractedElts[Idx] =
              Builder->CreateExtractElement(Idx < 16 ? Op0 : Op1,
                                            Builder->getInt32(Idx&15));
          }

          // Insert this value into the result vector.
          Result = Builder->CreateInsertElement(Result, ExtractedElts[Idx],
                                                Builder->getInt32(i));
        }
        return CastInst::Create(Instruction::BitCast, Result, CI.getType());
      }
    }
    break;

  case Intrinsic::arm_neon_vld1:
  case Intrinsic::arm_neon_vld2:
  case Intrinsic::arm_neon_vld3:
  case Intrinsic::arm_neon_vld4:
  case Intrinsic::arm_neon_vld2lane:
  case Intrinsic::arm_neon_vld3lane:
  case Intrinsic::arm_neon_vld4lane:
  case Intrinsic::arm_neon_vst1:
  case Intrinsic::arm_neon_vst2:
  case Intrinsic::arm_neon_vst3:
  case Intrinsic::arm_neon_vst4:
  case Intrinsic::arm_neon_vst2lane:
  case Intrinsic::arm_neon_vst3lane:
  case Intrinsic::arm_neon_vst4lane: {
    unsigned MemAlign = getKnownAlignment(II->getArgOperand(0), TD);
    unsigned AlignArg = II->getNumArgOperands() - 1;
    ConstantInt *IntrAlign = dyn_cast<ConstantInt>(II->getArgOperand(AlignArg));
    if (IntrAlign && IntrAlign->getZExtValue() < MemAlign) {
      II->setArgOperand(AlignArg,
                        ConstantInt::get(Type::getInt32Ty(II->getContext()),
                                         MemAlign, false));
      return II;
    }
    break;
  }

  case Intrinsic::arm_neon_vmulls:
  case Intrinsic::arm_neon_vmullu: {
    Value *Arg0 = II->getArgOperand(0);
    Value *Arg1 = II->getArgOperand(1);

    // Handle mul by zero first:
    if (isa<ConstantAggregateZero>(Arg0) || isa<ConstantAggregateZero>(Arg1)) {
      return ReplaceInstUsesWith(CI, ConstantAggregateZero::get(II->getType()));
    }

    // Check for constant LHS & RHS - in this case we just simplify.
    bool Zext = (II->getIntrinsicID() == Intrinsic::arm_neon_vmullu);
    VectorType *NewVT = cast<VectorType>(II->getType());
    unsigned NewWidth = NewVT->getElementType()->getIntegerBitWidth();
    if (ConstantDataVector *CV0 = dyn_cast<ConstantDataVector>(Arg0)) {
      if (ConstantDataVector *CV1 = dyn_cast<ConstantDataVector>(Arg1)) {
        VectorType* VT = cast<VectorType>(CV0->getType());
        SmallVector<Constant*, 4> NewElems;
        for (unsigned i = 0; i < VT->getNumElements(); ++i) {
          APInt CV0E =
            (cast<ConstantInt>(CV0->getAggregateElement(i)))->getValue();
          CV0E = Zext ? CV0E.zext(NewWidth) : CV0E.sext(NewWidth);
          APInt CV1E =
            (cast<ConstantInt>(CV1->getAggregateElement(i)))->getValue();
          CV1E = Zext ? CV1E.zext(NewWidth) : CV1E.sext(NewWidth);
          NewElems.push_back(
            ConstantInt::get(NewVT->getElementType(), CV0E * CV1E));
        }
        return ReplaceInstUsesWith(CI, ConstantVector::get(NewElems));
      }

      // Couldn't simplify - canonicalize constant to the RHS.
      std::swap(Arg0, Arg1);
    }

    // Handle mul by one:
    if (ConstantDataVector *CV1 = dyn_cast<ConstantDataVector>(Arg1)) {
      if (ConstantInt *Splat =
            dyn_cast_or_null<ConstantInt>(CV1->getSplatValue())) {
        if (Splat->isOne()) {
          if (Zext)
            return CastInst::CreateZExtOrBitCast(Arg0, II->getType());
          // else
          return CastInst::CreateSExtOrBitCast(Arg0, II->getType());
        }
      }
    }

    break;
  }

  case Intrinsic::stackrestore: {
    // If the save is right next to the restore, remove the restore. This can
    // happen when variable allocas are DCE'd.
    if (IntrinsicInst *SS = dyn_cast<IntrinsicInst>(II->getArgOperand(0))) {
      if (SS->getIntrinsicID() == Intrinsic::stacksave) {
        BasicBlock::iterator BI = SS;
        if (&*++BI == II)
          return EraseInstFromFunction(CI);
      }
    }

    // Scan down this block to see if there is another stack restore in the
    // same block without an intervening call/alloca.
    BasicBlock::iterator BI = II;
    TerminatorInst *TI = II->getParent()->getTerminator();
    bool CannotRemove = false;
    for (++BI; &*BI != TI; ++BI) {
      if (isa<AllocaInst>(BI) || isMalloc(BI)) {
        CannotRemove = true;
        break;
      }
      if (CallInst *BCI = dyn_cast<CallInst>(BI)) {
        if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(BCI)) {
          // If there is a stackrestore below this one, remove this one.
          if (II->getIntrinsicID() == Intrinsic::stackrestore)
            return EraseInstFromFunction(CI);
          // Otherwise, ignore the intrinsic.
        } else {
          // If we found a non-intrinsic call, we can't remove the stack
          // restore.
          CannotRemove = true;
          break;
        }
      }
    }

    // If the stack restore is in a return, resume, or unwind block and if there
    // are no allocas or calls between the restore and the return, nuke the
    // restore.
    if (!CannotRemove && (isa<ReturnInst>(TI) || isa<ResumeInst>(TI)))
      return EraseInstFromFunction(CI);
    break;
  }
  }

  return visitCallSite(II);
}

// InvokeInst simplification
//
Instruction *InstCombiner::visitInvokeInst(InvokeInst &II) {
  return visitCallSite(&II);
}

/// isSafeToEliminateVarargsCast - If this cast does not affect the value
/// passed through the varargs area, we can eliminate the use of the cast.
static bool isSafeToEliminateVarargsCast(const CallSite CS,
                                         const CastInst * const CI,
                                         const TargetData * const TD,
                                         const int ix) {
  if (!CI->isLosslessCast())
    return false;

  // The size of ByVal arguments is derived from the type, so we
  // can't change to a type with a different size. If the size were
  // passed explicitly we could avoid this check.
  if (!CS.isByValArgument(ix))
    return true;

  Type* SrcTy =
    cast<PointerType>(CI->getOperand(0)->getType())->getElementType();
  Type* DstTy = cast<PointerType>(CI->getType())->getElementType();
  if (!SrcTy->isSized() || !DstTy->isSized())
    return false;
  if (!TD || TD->getTypeAllocSize(SrcTy) != TD->getTypeAllocSize(DstTy))
    return false;
  return true;
}

namespace {
class InstCombineFortifiedLibCalls : public SimplifyFortifiedLibCalls {
  InstCombiner *IC;
protected:
  void replaceCall(Value *With) {
    NewInstruction = IC->ReplaceInstUsesWith(*CI, With);
  }
  bool isFoldable(unsigned SizeCIOp, unsigned SizeArgOp, bool isString) const {
    if (CI->getArgOperand(SizeCIOp) == CI->getArgOperand(SizeArgOp))
      return true;
    if (ConstantInt *SizeCI =
          dyn_cast<ConstantInt>(CI->getArgOperand(SizeCIOp))) {
      if (SizeCI->isAllOnesValue())
        return true;
      if (isString) {
        uint64_t Len = GetStringLength(CI->getArgOperand(SizeArgOp));
        // If the length is 0 we don't know how long it is and so we can't
        // remove the check.
        if (Len == 0) return false;
        return SizeCI->getZExtValue() >= Len;
      }
      if (ConstantInt *Arg =
            dyn_cast<ConstantInt>(CI->getArgOperand(SizeArgOp)))
        return SizeCI->getZExtValue() >= Arg->getZExtValue();
    }
    return false;
  }
public:
  InstCombineFortifiedLibCalls(InstCombiner *IC) : IC(IC), NewInstruction(0) { }
  Instruction *NewInstruction;
};
} // end anonymous namespace

// Try to fold some different type of calls here.
// Currently we're only working with the checking functions, memcpy_chk,
// mempcpy_chk, memmove_chk, memset_chk, strcpy_chk, stpcpy_chk, strncpy_chk,
// strcat_chk and strncat_chk.
Instruction *InstCombiner::tryOptimizeCall(CallInst *CI, const TargetData *TD) {
  if (CI->getCalledFunction() == 0) return 0;

  InstCombineFortifiedLibCalls Simplifier(this);
  Simplifier.fold(CI, TD);
  return Simplifier.NewInstruction;
}

static IntrinsicInst *FindInitTrampolineFromAlloca(Value *TrampMem) {
  // Strip off at most one level of pointer casts, looking for an alloca. This
  // is good enough in practice and simpler than handling any number of casts.
  Value *Underlying = TrampMem->stripPointerCasts();
  if (Underlying != TrampMem &&
      (!Underlying->hasOneUse() || *Underlying->use_begin() != TrampMem))
    return 0;
  if (!isa<AllocaInst>(Underlying))
    return 0;

  IntrinsicInst *InitTrampoline = 0;
  for (Value::use_iterator I = TrampMem->use_begin(), E = TrampMem->use_end();
       I != E; I++) {
    IntrinsicInst *II = dyn_cast<IntrinsicInst>(*I);
    if (!II)
      return 0;
    if (II->getIntrinsicID() == Intrinsic::init_trampoline) {
      if (InitTrampoline)
        // More than one init_trampoline writes to this value. Give up.
        return 0;
      InitTrampoline = II;
      continue;
    }
    if (II->getIntrinsicID() == Intrinsic::adjust_trampoline)
      // Allow any number of calls to adjust.trampoline.
      continue;
    return 0;
  }

  // No call to init.trampoline found.
  if (!InitTrampoline)
    return 0;

  // Check that the alloca is being used in the expected way.
  if (InitTrampoline->getOperand(0) != TrampMem)
    return 0;

  return InitTrampoline;
}

static IntrinsicInst *FindInitTrampolineFromBB(IntrinsicInst *AdjustTramp,
                                               Value *TrampMem) {
  // Visit all the previous instructions in the basic block, and try to find a
  // init.trampoline which has a direct path to the adjust.trampoline.
  for (BasicBlock::iterator I = AdjustTramp,
       E = AdjustTramp->getParent()->begin(); I != E; ) {
    Instruction *Inst = --I;
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I))
      if (II->getIntrinsicID() == Intrinsic::init_trampoline &&
          II->getOperand(0) == TrampMem)
        return II;
    if (Inst->mayWriteToMemory())
      return 0;
  }
  return 0;
}

// Given a call to llvm.adjust.trampoline, find and return the corresponding
// call to llvm.init.trampoline if the call to the trampoline can be optimized
// to a direct call to a function. Otherwise return NULL.
//
static IntrinsicInst *FindInitTrampoline(Value *Callee) {
  Callee = Callee->stripPointerCasts();
  IntrinsicInst *AdjustTramp = dyn_cast<IntrinsicInst>(Callee);
  if (!AdjustTramp ||
      AdjustTramp->getIntrinsicID() != Intrinsic::adjust_trampoline)
    return 0;

  Value *TrampMem = AdjustTramp->getOperand(0);

  if (IntrinsicInst *IT = FindInitTrampolineFromAlloca(TrampMem))
    return IT;
  if (IntrinsicInst *IT = FindInitTrampolineFromBB(AdjustTramp, TrampMem))
    return IT;
  return 0;
}

// visitCallSite - Improvements for call and invoke instructions.
//
Instruction *InstCombiner::visitCallSite(CallSite CS) {
  bool Changed = false;

  // If the callee is a pointer to a function, attempt to move any casts to the
  // arguments of the call/invoke.
  Value *Callee = CS.getCalledValue();
  if (!isa<Function>(Callee) && transformConstExprCastCall(CS))
    return 0;

  if (Function *CalleeF = dyn_cast<Function>(Callee))
    // If the call and callee calling conventions don't match, this call must
    // be unreachable, as the call is undefined.
    if (CalleeF->getCallingConv() != CS.getCallingConv() &&
        // Only do this for calls to a function with a body. A prototype may
        // not actually end up matching the implementation's calling conv for a
        // variety of reasons (e.g. it may be written in assembly).
        !CalleeF->isDeclaration()) {
      Instruction *OldCall = CS.getInstruction();
      new StoreInst(ConstantInt::getTrue(Callee->getContext()),
                    UndefValue::get(Type::getInt1PtrTy(Callee->getContext())),
                    OldCall);
      // If OldCall does not return void then replaceAllUsesWith undef.
      // This allows ValueHandlers and custom metadata to adjust themselves.
      if (!OldCall->getType()->isVoidTy())
        ReplaceInstUsesWith(*OldCall, UndefValue::get(OldCall->getType()));
      if (isa<CallInst>(OldCall))
        return EraseInstFromFunction(*OldCall);

      // We cannot remove an invoke, because it would change the CFG, just
      // change the callee to a null pointer.
      cast<InvokeInst>(OldCall)->setCalledFunction(
                                   Constant::getNullValue(CalleeF->getType()));
      return 0;
    }

  if (isa<ConstantPointerNull>(Callee) || isa<UndefValue>(Callee)) {
    // This instruction is not reachable, just remove it. We insert a store to
    // undef so that we know that this code is not reachable, despite the fact
    // that we can't modify the CFG here.
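    // (A store through an undef pointer is undefined behaviour, which is what
    // lets later passes treat this point as unreachable.)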
    new StoreInst(ConstantInt::getTrue(Callee->getContext()),
                  UndefValue::get(Type::getInt1PtrTy(Callee->getContext())),
                  CS.getInstruction());

    // If CS does not return void then replaceAllUsesWith undef.
    // This allows ValueHandlers and custom metadata to adjust themselves.
1002 if (!CS.getInstruction()->getType()->isVoidTy())
Eli Friedman3e22cb92011-05-18 00:32:01 +00001003 ReplaceInstUsesWith(*CS.getInstruction(),
1004 UndefValue::get(CS.getInstruction()->getType()));
Chris Lattner753a2b42010-01-05 07:32:13 +00001005
1006 if (InvokeInst *II = dyn_cast<InvokeInst>(CS.getInstruction())) {
1007 // Don't break the CFG, insert a dummy cond branch.
1008 BranchInst::Create(II->getNormalDest(), II->getUnwindDest(),
1009 ConstantInt::getTrue(Callee->getContext()), II);
1010 }
1011 return EraseInstFromFunction(*CS.getInstruction());
1012 }
1013
Duncan Sands4a544a72011-09-06 13:37:06 +00001014 if (IntrinsicInst *II = FindInitTrampoline(Callee))
1015 return transformCallThroughTrampoline(CS, II);
Chris Lattner753a2b42010-01-05 07:32:13 +00001016
Chris Lattnerdb125cf2011-07-18 04:54:35 +00001017 PointerType *PTy = cast<PointerType>(Callee->getType());
1018 FunctionType *FTy = cast<FunctionType>(PTy->getElementType());
Chris Lattner753a2b42010-01-05 07:32:13 +00001019 if (FTy->isVarArg()) {
Eli Friedmanba78c882011-11-29 01:18:23 +00001020 int ix = FTy->getNumParams();
Chris Lattner753a2b42010-01-05 07:32:13 +00001021 // See if we can optimize any arguments passed through the varargs area of
1022 // the call.
1023 for (CallSite::arg_iterator I = CS.arg_begin()+FTy->getNumParams(),
1024 E = CS.arg_end(); I != E; ++I, ++ix) {
1025 CastInst *CI = dyn_cast<CastInst>(*I);
1026 if (CI && isSafeToEliminateVarargsCast(CS, CI, TD, ix)) {
1027 *I = CI->getOperand(0);
1028 Changed = true;
1029 }
1030 }
1031 }
1032
1033 if (isa<InlineAsm>(Callee) && !CS.doesNotThrow()) {
1034 // Inline asm calls cannot throw - mark them 'nounwind'.
1035 CS.setDoesNotThrow();
1036 Changed = true;
1037 }
1038
Eric Christopher27ceaa12010-03-06 10:50:38 +00001039 // Try to optimize the call if possible, we require TargetData for most of
1040 // this. None of these calls are seen as possibly dead so go ahead and
1041 // delete the instruction now.
1042 if (CallInst *CI = dyn_cast<CallInst>(CS.getInstruction())) {
1043 Instruction *I = tryOptimizeCall(CI, TD);
Eric Christopher7b323a32010-03-06 10:59:25 +00001044 // If we changed something return the result, etc. Otherwise let
1045 // the fallthrough check.
1046 if (I) return EraseInstFromFunction(*I);
Eric Christopher27ceaa12010-03-06 10:50:38 +00001047 }
1048
Chris Lattner753a2b42010-01-05 07:32:13 +00001049 return Changed ? CS.getInstruction() : 0;
1050}
1051
1052// transformConstExprCastCall - If the callee is a constexpr cast of a function,
1053// attempt to move the cast to the arguments of the call/invoke.
1054//
bool InstCombiner::transformConstExprCastCall(CallSite CS) {
  Function *Callee =
    dyn_cast<Function>(CS.getCalledValue()->stripPointerCasts());
  if (Callee == 0)
    return false;
  Instruction *Caller = CS.getInstruction();
  const AttrListPtr &CallerPAL = CS.getAttributes();

  // Okay, this is a cast from a function to a different type.  Unless doing so
  // would cause a type conversion of one of our arguments, change this call to
  // be a direct call with arguments casted to the appropriate types.
  //
  FunctionType *FT = Callee->getFunctionType();
  Type *OldRetTy = Caller->getType();
  Type *NewRetTy = FT->getReturnType();

  if (NewRetTy->isStructTy())
    return false; // TODO: Handle multiple return values.

  // Check to see if we are changing the return type...
  if (OldRetTy != NewRetTy) {
    if (Callee->isDeclaration() &&
        // Conversion is ok if changing from one pointer type to another or from
        // a pointer to an integer of the same size.
        !((OldRetTy->isPointerTy() || !TD ||
           OldRetTy == TD->getIntPtrType(Caller->getContext())) &&
          (NewRetTy->isPointerTy() || !TD ||
           NewRetTy == TD->getIntPtrType(Caller->getContext()))))
      return false;   // Cannot transform this return value.

    if (!Caller->use_empty() &&
        // void -> non-void is handled specially
        !NewRetTy->isVoidTy() && !CastInst::isCastable(NewRetTy, OldRetTy))
      return false;   // Cannot transform this return value.

    if (!CallerPAL.isEmpty() && !Caller->use_empty()) {
      Attributes RAttrs = CallerPAL.getRetAttributes();
      if (RAttrs & Attribute::typeIncompatible(NewRetTy))
        return false;   // Attribute not compatible with transformed value.
    }

    // If the callsite is an invoke instruction, and the return value is used by
    // a PHI node in a successor, we cannot change the return type of the call
    // because there is no place to put the cast instruction (without breaking
    // the critical edge).  Bail out in this case.
    if (!Caller->use_empty())
      if (InvokeInst *II = dyn_cast<InvokeInst>(Caller))
        for (Value::use_iterator UI = II->use_begin(), E = II->use_end();
             UI != E; ++UI)
          if (PHINode *PN = dyn_cast<PHINode>(*UI))
            if (PN->getParent() == II->getNormalDest() ||
                PN->getParent() == II->getUnwindDest())
              return false;
  }

  unsigned NumActualArgs = unsigned(CS.arg_end()-CS.arg_begin());
  unsigned NumCommonArgs = std::min(FT->getNumParams(), NumActualArgs);

  CallSite::arg_iterator AI = CS.arg_begin();
  for (unsigned i = 0, e = NumCommonArgs; i != e; ++i, ++AI) {
    Type *ParamTy = FT->getParamType(i);
    Type *ActTy = (*AI)->getType();

    if (!CastInst::isCastable(ActTy, ParamTy))
      return false;   // Cannot transform this parameter value.

    Attributes Attrs = CallerPAL.getParamAttributes(i + 1);
    if (Attrs & Attribute::typeIncompatible(ParamTy))
      return false;   // Attribute not compatible with transformed value.

    // If the parameter is passed as a byval argument, the pointee type must
    // be sized and must have the same allocation size as the old pointee
    // type.
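    // (Rationale: a byval argument is passed by copying the pointee, so the
    // copy is only equivalent if the old and new pointee types have the same
    // allocation size; an unsized pointee or missing TargetData cannot be
    // checked, hence the bail-outs below.)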
    if (ParamTy != ActTy && (Attrs & Attribute::ByVal)) {
      PointerType *ParamPTy = dyn_cast<PointerType>(ParamTy);
      if (ParamPTy == 0 || !ParamPTy->getElementType()->isSized() || TD == 0)
        return false;

      Type *CurElTy = cast<PointerType>(ActTy)->getElementType();
      if (TD->getTypeAllocSize(CurElTy) !=
          TD->getTypeAllocSize(ParamPTy->getElementType()))
        return false;
    }

    // Converting from one pointer type to another or between a pointer and an
    // integer of the same size is safe even if we do not have a body.
    bool isConvertible = ActTy == ParamTy ||
      (TD && ((ParamTy->isPointerTy() ||
               ParamTy == TD->getIntPtrType(Caller->getContext())) &&
              (ActTy->isPointerTy() ||
               ActTy == TD->getIntPtrType(Caller->getContext()))));
    if (Callee->isDeclaration() && !isConvertible) return false;
  }

  if (Callee->isDeclaration()) {
    // Do not delete arguments unless we have a function body.
    if (FT->getNumParams() < NumActualArgs && !FT->isVarArg())
      return false;

    // If the callee is just a declaration, don't change the varargsness of the
    // call.  We don't want to introduce a varargs call where one doesn't
    // already exist.
    PointerType *APTy = cast<PointerType>(CS.getCalledValue()->getType());
    if (FT->isVarArg()!=cast<FunctionType>(APTy->getElementType())->isVarArg())
      return false;

    // If both the callee and the cast type are varargs, we still have to make
    // sure the number of fixed parameters is the same; otherwise we have the
    // same ABI issues as if we had introduced a varargs call.
    if (FT->isVarArg() &&
        cast<FunctionType>(APTy->getElementType())->isVarArg() &&
        FT->getNumParams() !=
        cast<FunctionType>(APTy->getElementType())->getNumParams())
      return false;
  }

  if (FT->getNumParams() < NumActualArgs && FT->isVarArg() &&
      !CallerPAL.isEmpty())
    // In this case we have more arguments than the new function type, but we
    // won't be dropping them.  Check that these extra arguments have attributes
    // that are compatible with being a vararg call argument.
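    // (At the time of writing, Attribute::VarArgsIncompatible covers
    // attributes such as 'sret' that change how an argument is passed and so
    // cannot apply to a vararg; see its definition in Attributes.h for the
    // authoritative set.)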
    for (unsigned i = CallerPAL.getNumSlots(); i; --i) {
      if (CallerPAL.getSlot(i - 1).Index <= FT->getNumParams())
        break;
      Attributes PAttrs = CallerPAL.getSlot(i - 1).Attrs;
      if (PAttrs & Attribute::VarArgsIncompatible)
        return false;
    }

  // Okay, we decided that this is a safe thing to do: go ahead and start
  // inserting cast instructions as necessary.
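  // The rewrite below proceeds in three steps: build the casted argument list
  // and attribute vector, create a new call or invoke instruction from them,
  // and finally cast the result back to the caller's original return type if
  // necessary.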
  std::vector<Value*> Args;
  Args.reserve(NumActualArgs);
  SmallVector<AttributeWithIndex, 8> attrVec;
  attrVec.reserve(NumCommonArgs);

  // Get any return attributes.
  Attributes RAttrs = CallerPAL.getRetAttributes();

  // If the return value is not being used, the type may not be compatible
  // with the existing attributes.  Wipe out any problematic attributes.
  RAttrs &= ~Attribute::typeIncompatible(NewRetTy);

  // Add the new return attributes.
  if (RAttrs)
    attrVec.push_back(AttributeWithIndex::get(0, RAttrs));

  AI = CS.arg_begin();
  for (unsigned i = 0; i != NumCommonArgs; ++i, ++AI) {
    Type *ParamTy = FT->getParamType(i);
    if ((*AI)->getType() == ParamTy) {
      Args.push_back(*AI);
    } else {
      Instruction::CastOps opcode = CastInst::getCastOpcode(*AI,
          false, ParamTy, false);
      Args.push_back(Builder->CreateCast(opcode, *AI, ParamTy));
    }

    // Add any parameter attributes.
    if (Attributes PAttrs = CallerPAL.getParamAttributes(i + 1))
      attrVec.push_back(AttributeWithIndex::get(i + 1, PAttrs));
  }

  // If the function takes more arguments than the call was taking, add them
  // now.
  for (unsigned i = NumCommonArgs; i != FT->getNumParams(); ++i)
    Args.push_back(Constant::getNullValue(FT->getParamType(i)));

  // If we are removing arguments to the function, emit an obnoxious warning.
  if (FT->getNumParams() < NumActualArgs) {
    if (!FT->isVarArg()) {
      errs() << "WARNING: While resolving call to function '"
             << Callee->getName() << "' arguments were dropped!\n";
    } else {
      // Add all of the arguments in their promoted form to the arg list.
      for (unsigned i = FT->getNumParams(); i != NumActualArgs; ++i, ++AI) {
        Type *PTy = getPromotedType((*AI)->getType());
        if (PTy != (*AI)->getType()) {
          // Must promote to pass through va_arg area!
          Instruction::CastOps opcode =
            CastInst::getCastOpcode(*AI, false, PTy, false);
          Args.push_back(Builder->CreateCast(opcode, *AI, PTy));
        } else {
          Args.push_back(*AI);
        }

        // Add any parameter attributes.
        if (Attributes PAttrs = CallerPAL.getParamAttributes(i + 1))
          attrVec.push_back(AttributeWithIndex::get(i + 1, PAttrs));
      }
    }
  }

  if (Attributes FnAttrs = CallerPAL.getFnAttributes())
    attrVec.push_back(AttributeWithIndex::get(~0, FnAttrs));

  if (NewRetTy->isVoidTy())
    Caller->setName("");   // Void type should not have a name.

  const AttrListPtr &NewCallerPAL = AttrListPtr::get(attrVec);

  Instruction *NC;
  if (InvokeInst *II = dyn_cast<InvokeInst>(Caller)) {
    NC = Builder->CreateInvoke(Callee, II->getNormalDest(),
                               II->getUnwindDest(), Args);
    NC->takeName(II);
    cast<InvokeInst>(NC)->setCallingConv(II->getCallingConv());
    cast<InvokeInst>(NC)->setAttributes(NewCallerPAL);
  } else {
    CallInst *CI = cast<CallInst>(Caller);
    NC = Builder->CreateCall(Callee, Args);
    NC->takeName(CI);
    if (CI->isTailCall())
      cast<CallInst>(NC)->setTailCall();
    cast<CallInst>(NC)->setCallingConv(CI->getCallingConv());
    cast<CallInst>(NC)->setAttributes(NewCallerPAL);
  }

  // Insert a cast of the return type as necessary.
  Value *NV = NC;
  if (OldRetTy != NV->getType() && !Caller->use_empty()) {
    if (!NV->getType()->isVoidTy()) {
      Instruction::CastOps opcode =
        CastInst::getCastOpcode(NC, false, OldRetTy, false);
      NV = NC = CastInst::Create(opcode, NC, OldRetTy);
      NC->setDebugLoc(Caller->getDebugLoc());

      // If this is an invoke instruction, insert the cast after the first
      // non-PHI instruction in the normal successor block.
      if (InvokeInst *II = dyn_cast<InvokeInst>(Caller)) {
        BasicBlock::iterator I = II->getNormalDest()->getFirstInsertionPt();
        InsertNewInstBefore(NC, *I);
      } else {
        // Otherwise it's a call; just insert the cast right after the call.
        InsertNewInstBefore(NC, *Caller);
      }
      Worklist.AddUsersToWorkList(*Caller);
    } else {
      NV = UndefValue::get(Caller->getType());
    }
  }

  if (!Caller->use_empty())
    ReplaceInstUsesWith(*Caller, NV);

  EraseInstFromFunction(*Caller);
  return true;
}

// transformCallThroughTrampoline - Turn a call to a function created by the
// init_trampoline / adjust_trampoline intrinsic pair into a direct call to
// the underlying function.
//
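// Illustrative sketch (hypothetical IR, not from the source tree): if the
// trampoline was set up with
//   call void @llvm.init.trampoline(i8* %tramp,
//                                   i8* bitcast (i32 (i8*, i32)* @f to i8*),
//                                   i8* %chain)
// where @f marks its first parameter 'nest', then an indirect call through
// the adjusted trampoline pointer, say
//   %r = call i32 %fp(i32 %x)
// becomes a direct call with the chain value spliced into the argument list:
//   %r = call i32 @f(i8* nest %chain, i32 %x)
//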
Instruction *
InstCombiner::transformCallThroughTrampoline(CallSite CS,
                                             IntrinsicInst *Tramp) {
  Value *Callee = CS.getCalledValue();
  PointerType *PTy = cast<PointerType>(Callee->getType());
  FunctionType *FTy = cast<FunctionType>(PTy->getElementType());
  const AttrListPtr &Attrs = CS.getAttributes();

  // If the call already has the 'nest' attribute somewhere then give up -
  // otherwise 'nest' would occur twice after splicing in the chain.
  if (Attrs.hasAttrSomewhere(Attribute::Nest))
    return 0;

  assert(Tramp &&
         "transformCallThroughTrampoline called with incorrect CallSite.");

  Function *NestF =
    cast<Function>(Tramp->getArgOperand(1)->stripPointerCasts());
  PointerType *NestFPTy = cast<PointerType>(NestF->getType());
  FunctionType *NestFTy = cast<FunctionType>(NestFPTy->getElementType());

  const AttrListPtr &NestAttrs = NestF->getAttributes();
  if (!NestAttrs.isEmpty()) {
    unsigned NestIdx = 1;
    Type *NestTy = 0;
    Attributes NestAttr = Attribute::None;

    // Look for a parameter marked with the 'nest' attribute.
    for (FunctionType::param_iterator I = NestFTy->param_begin(),
         E = NestFTy->param_end(); I != E; ++NestIdx, ++I)
      if (NestAttrs.paramHasAttr(NestIdx, Attribute::Nest)) {
        // Record the parameter type and any other attributes.
        NestTy = *I;
        NestAttr = NestAttrs.getParamAttributes(NestIdx);
        break;
      }

    if (NestTy) {
      Instruction *Caller = CS.getInstruction();
      std::vector<Value*> NewArgs;
      NewArgs.reserve(unsigned(CS.arg_end()-CS.arg_begin())+1);

      SmallVector<AttributeWithIndex, 8> NewAttrs;
      NewAttrs.reserve(Attrs.getNumSlots() + 1);

      // Insert the nest argument into the call argument list, which may
      // mean appending it.  Likewise for attributes.
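      // (If the 'nest' parameter is the last parameter of the target
      // function, NestIdx ends up one past the last explicit argument and the
      // loops below simply append the chain value, its attribute, and its
      // type.)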

      // Add any result attributes.
      if (Attributes Attr = Attrs.getRetAttributes())
        NewAttrs.push_back(AttributeWithIndex::get(0, Attr));

      {
        unsigned Idx = 1;
        CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
        do {
          if (Idx == NestIdx) {
            // Add the chain argument and attributes.
            Value *NestVal = Tramp->getArgOperand(2);
            if (NestVal->getType() != NestTy)
              NestVal = Builder->CreateBitCast(NestVal, NestTy, "nest");
            NewArgs.push_back(NestVal);
            NewAttrs.push_back(AttributeWithIndex::get(NestIdx, NestAttr));
          }

          if (I == E)
            break;

          // Add the original argument and attributes.
          NewArgs.push_back(*I);
          if (Attributes Attr = Attrs.getParamAttributes(Idx))
            NewAttrs.push_back(
              AttributeWithIndex::get(Idx + (Idx >= NestIdx), Attr));

          ++Idx, ++I;
        } while (1);
      }

      // Add any function attributes.
      if (Attributes Attr = Attrs.getFnAttributes())
        NewAttrs.push_back(AttributeWithIndex::get(~0, Attr));

      // The trampoline may have been bitcast to a bogus type (FTy).
      // Handle this by synthesizing a new function type, equal to FTy
      // with the chain parameter inserted.

      std::vector<Type*> NewTypes;
      NewTypes.reserve(FTy->getNumParams()+1);

      // Insert the chain's type into the list of parameter types, which may
      // mean appending it.
      {
        unsigned Idx = 1;
        FunctionType::param_iterator I = FTy->param_begin(),
          E = FTy->param_end();

        do {
          if (Idx == NestIdx)
            // Add the chain's type.
            NewTypes.push_back(NestTy);

          if (I == E)
            break;

          // Add the original type.
          NewTypes.push_back(*I);

          ++Idx, ++I;
        } while (1);
      }

      // Replace the trampoline call with a direct call.  Let the generic
      // code sort out any function type mismatches.
      FunctionType *NewFTy = FunctionType::get(FTy->getReturnType(), NewTypes,
                                               FTy->isVarArg());
      Constant *NewCallee =
        NestF->getType() == PointerType::getUnqual(NewFTy) ?
        NestF : ConstantExpr::getBitCast(NestF,
                                         PointerType::getUnqual(NewFTy));
      const AttrListPtr &NewPAL = AttrListPtr::get(NewAttrs);

      Instruction *NewCaller;
      if (InvokeInst *II = dyn_cast<InvokeInst>(Caller)) {
        NewCaller = InvokeInst::Create(NewCallee,
                                       II->getNormalDest(), II->getUnwindDest(),
                                       NewArgs);
        cast<InvokeInst>(NewCaller)->setCallingConv(II->getCallingConv());
        cast<InvokeInst>(NewCaller)->setAttributes(NewPAL);
      } else {
        NewCaller = CallInst::Create(NewCallee, NewArgs);
        if (cast<CallInst>(Caller)->isTailCall())
          cast<CallInst>(NewCaller)->setTailCall();
        cast<CallInst>(NewCaller)->
          setCallingConv(cast<CallInst>(Caller)->getCallingConv());
        cast<CallInst>(NewCaller)->setAttributes(NewPAL);
      }

      return NewCaller;
    }
  }

  // Replace the trampoline call with a direct call.  Since there is no 'nest'
  // parameter, there is no need to adjust the argument list.  Let the generic
  // code sort out any function type mismatches.
  Constant *NewCallee =
    NestF->getType() == PTy ? NestF :
    ConstantExpr::getBitCast(NestF, PTy);
  CS.setCalledFunction(NewCallee);
  return CS.getInstruction();
}